Diffstat (limited to 'media/libstagefright')
-rw-r--r-- media/libstagefright/AACWriter.cpp | 24
-rw-r--r-- media/libstagefright/ACodec.cpp | 126
-rw-r--r-- media/libstagefright/AMRWriter.cpp | 13
-rw-r--r-- media/libstagefright/Android.mk | 10
-rw-r--r-- media/libstagefright/FileSource.cpp | 4
-rw-r--r-- media/libstagefright/HTTPBase.cpp | 6
-rw-r--r-- media/libstagefright/MPEG2TSWriter.cpp | 19
-rw-r--r-- media/libstagefright/MPEG4Extractor.cpp | 291
-rw-r--r-- media/libstagefright/MPEG4Writer.cpp | 186
-rw-r--r-- media/libstagefright/MediaClock.cpp | 153
-rw-r--r-- media/libstagefright/MediaCodec.cpp | 485
-rw-r--r-- media/libstagefright/MediaCodecList.cpp | 259
-rw-r--r-- media/libstagefright/MediaCodecListOverrides.cpp | 404
-rw-r--r-- media/libstagefright/MediaCodecListOverrides.h | 50
-rw-r--r-- media/libstagefright/MediaCodecSource.cpp | 34
-rw-r--r-- media/libstagefright/MediaMuxer.cpp | 15
-rw-r--r-- media/libstagefright/MediaSync.cpp | 541
-rw-r--r-- media/libstagefright/NuCachedSource2.cpp | 6
-rw-r--r-- media/libstagefright/ProcessInfo.cpp | 53
-rw-r--r-- media/libstagefright/SampleTable.cpp | 21
-rw-r--r-- media/libstagefright/StagefrightMetadataRetriever.cpp | 6
-rw-r--r-- media/libstagefright/Utils.cpp | 73
-rw-r--r-- media/libstagefright/avc_utils.cpp | 33
-rw-r--r-- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 6
-rw-r--r-- media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 231
-rw-r--r-- media/libstagefright/codecs/on2/dec/SoftVPX.h | 14
-rw-r--r-- media/libstagefright/codecs/opus/dec/SoftOpus.cpp | 12
-rw-r--r-- media/libstagefright/colorconversion/SoftwareRenderer.cpp | 101
-rw-r--r-- media/libstagefright/filters/Android.mk | 27
-rw-r--r-- media/libstagefright/filters/ColorConvert.cpp | 111
-rw-r--r-- media/libstagefright/filters/ColorConvert.h | 43
-rw-r--r-- media/libstagefright/filters/GraphicBufferListener.cpp | 154
-rw-r--r-- media/libstagefright/filters/GraphicBufferListener.h | 70
-rw-r--r-- media/libstagefright/filters/IntrinsicBlurFilter.cpp | 99
-rw-r--r-- media/libstagefright/filters/IntrinsicBlurFilter.h | 50
-rw-r--r-- media/libstagefright/filters/MediaFilter.cpp | 818
-rw-r--r-- media/libstagefright/filters/RSFilter.cpp | 96
-rw-r--r-- media/libstagefright/filters/RSFilter.h | 53
-rw-r--r-- media/libstagefright/filters/SaturationFilter.cpp | 99
-rw-r--r-- media/libstagefright/filters/SaturationFilter.h | 52
-rw-r--r-- media/libstagefright/filters/SimpleFilter.cpp | 39
-rw-r--r-- media/libstagefright/filters/SimpleFilter.h | 52
-rw-r--r-- media/libstagefright/filters/ZeroFilter.cpp | 57
-rw-r--r-- media/libstagefright/filters/ZeroFilter.h | 43
-rw-r--r-- media/libstagefright/filters/saturation.rs | 40
-rw-r--r-- media/libstagefright/filters/saturationARGB.rs | 40
-rw-r--r-- media/libstagefright/foundation/AHandler.cpp | 18
-rw-r--r-- media/libstagefright/foundation/ALooper.cpp | 30
-rw-r--r-- media/libstagefright/foundation/ALooperRoster.cpp | 128
-rw-r--r-- media/libstagefright/foundation/AMessage.cpp | 106
-rw-r--r-- media/libstagefright/httplive/LiveSession.cpp | 1642
-rw-r--r-- media/libstagefright/httplive/LiveSession.h | 136
-rw-r--r-- media/libstagefright/httplive/M3UParser.cpp | 66
-rw-r--r-- media/libstagefright/httplive/M3UParser.h | 2
-rw-r--r-- media/libstagefright/httplive/PlaylistFetcher.cpp | 998
-rw-r--r-- media/libstagefright/httplive/PlaylistFetcher.h | 53
-rw-r--r-- media/libstagefright/include/MPEG4Extractor.h | 6
-rw-r--r-- media/libstagefright/include/avc_utils.h | 5
-rw-r--r-- media/libstagefright/mpeg2ts/ATSParser.cpp | 289
-rw-r--r-- media/libstagefright/mpeg2ts/ATSParser.h | 4
-rw-r--r-- media/libstagefright/mpeg2ts/AnotherPacketSource.cpp | 247
-rw-r--r-- media/libstagefright/mpeg2ts/AnotherPacketSource.h | 11
-rw-r--r-- media/libstagefright/mpeg2ts/ESQueue.cpp | 100
-rw-r--r-- media/libstagefright/mpeg2ts/ESQueue.h | 1
-rw-r--r-- media/libstagefright/omx/Android.mk | 5
-rw-r--r-- media/libstagefright/omx/FrameDropper.cpp | 70
-rw-r--r-- media/libstagefright/omx/FrameDropper.h | 50
-rw-r--r-- media/libstagefright/omx/GraphicBufferSource.cpp | 112
-rw-r--r-- media/libstagefright/omx/GraphicBufferSource.h | 19
-rw-r--r-- media/libstagefright/omx/OMXNodeInstance.cpp | 10
-rw-r--r-- media/libstagefright/omx/SimpleSoftOMXComponent.cpp | 6
-rw-r--r-- media/libstagefright/omx/tests/Android.mk | 18
-rw-r--r-- media/libstagefright/omx/tests/FrameDropper_test.cpp | 136
-rw-r--r-- media/libstagefright/rtsp/ARTPConnection.cpp | 8
-rw-r--r-- media/libstagefright/rtsp/ARTPSession.cpp | 2
-rw-r--r-- media/libstagefright/rtsp/ARTPWriter.cpp | 8
-rw-r--r-- media/libstagefright/rtsp/ARTSPConnection.cpp | 14
-rw-r--r-- media/libstagefright/rtsp/MyHandler.h | 78
-rw-r--r-- media/libstagefright/rtsp/MyTransmitter.h | 40
-rw-r--r-- media/libstagefright/rtsp/SDPLoader.cpp | 2
-rw-r--r-- media/libstagefright/rtsp/UDPPusher.cpp | 4
-rw-r--r-- media/libstagefright/tests/Android.mk | 27
-rw-r--r-- media/libstagefright/tests/MediaCodecListOverrides_test.cpp | 316
-rw-r--r-- media/libstagefright/timedtext/TimedTextPlayer.cpp | 14
-rw-r--r-- media/libstagefright/webm/WebmWriter.cpp | 32
-rw-r--r-- media/libstagefright/webm/WebmWriter.h | 1
-rw-r--r-- media/libstagefright/wifi-display/MediaSender.cpp | 4
-rw-r--r-- media/libstagefright/wifi-display/rtp/RTPSender.cpp | 4
-rw-r--r-- media/libstagefright/wifi-display/source/Converter.cpp | 15
-rw-r--r-- media/libstagefright/wifi-display/source/MediaPuller.cpp | 12
-rw-r--r-- media/libstagefright/wifi-display/source/PlaybackSession.cpp | 20
-rw-r--r-- media/libstagefright/wifi-display/source/RepeaterSource.cpp | 2
-rw-r--r-- media/libstagefright/wifi-display/source/WifiDisplaySource.cpp | 34
-rw-r--r-- media/libstagefright/wifi-display/source/WifiDisplaySource.h | 3
-rw-r--r-- media/libstagefright/yuv/YUVImage.cpp | 12
95 files changed, 8151 insertions(+), 1888 deletions(-)
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index 2e41d80..9d90dbd 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -36,33 +36,19 @@
namespace android {
-AACWriter::AACWriter(const char *filename)
- : mFd(-1),
- mInitCheck(NO_INIT),
- mStarted(false),
- mPaused(false),
- mResumed(false),
- mChannelCount(-1),
- mSampleRate(-1),
- mAACProfile(OMX_AUDIO_AACObjectLC) {
-
- ALOGV("AACWriter Constructor");
-
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- mInitCheck = OK;
- }
-}
-
AACWriter::AACWriter(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
mStarted(false),
mPaused(false),
mResumed(false),
+ mThread(0),
+ mEstimatedSizeBytes(0),
+ mEstimatedDurationUs(0),
mChannelCount(-1),
mSampleRate(-1),
- mAACProfile(OMX_AUDIO_AACObjectLC) {
+ mAACProfile(OMX_AUDIO_AACObjectLC),
+ mFrameDurationUs(0) {
}
AACWriter::~AACWriter() {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d298cb1..45f6339 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -419,6 +419,7 @@ ACodec::ACodec()
mMetaDataBuffersToSubmit(0),
mRepeatFrameDelayUs(-1ll),
mMaxPtsGapUs(-1ll),
+ mMaxFps(-1),
mTimePerFrameUs(-1ll),
mTimePerCaptureUs(-1ll),
mCreateInputBuffersSuspended(false),
@@ -451,61 +452,61 @@ void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
void ACodec::initiateSetup(const sp<AMessage> &msg) {
msg->setWhat(kWhatSetup);
- msg->setTarget(id());
+ msg->setTarget(this);
msg->post();
}
void ACodec::signalSetParameters(const sp<AMessage> &params) {
- sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
msg->setMessage("params", params);
msg->post();
}
void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
msg->setWhat(kWhatAllocateComponent);
- msg->setTarget(id());
+ msg->setTarget(this);
msg->post();
}
void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
msg->setWhat(kWhatConfigureComponent);
- msg->setTarget(id());
+ msg->setTarget(this);
msg->post();
}
void ACodec::initiateCreateInputSurface() {
- (new AMessage(kWhatCreateInputSurface, id()))->post();
+ (new AMessage(kWhatCreateInputSurface, this))->post();
}
void ACodec::signalEndOfInputStream() {
- (new AMessage(kWhatSignalEndOfInputStream, id()))->post();
+ (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}
void ACodec::initiateStart() {
- (new AMessage(kWhatStart, id()))->post();
+ (new AMessage(kWhatStart, this))->post();
}
void ACodec::signalFlush() {
ALOGV("[%s] signalFlush", mComponentName.c_str());
- (new AMessage(kWhatFlush, id()))->post();
+ (new AMessage(kWhatFlush, this))->post();
}
void ACodec::signalResume() {
- (new AMessage(kWhatResume, id()))->post();
+ (new AMessage(kWhatResume, this))->post();
}
void ACodec::initiateShutdown(bool keepComponentAllocated) {
- sp<AMessage> msg = new AMessage(kWhatShutdown, id());
+ sp<AMessage> msg = new AMessage(kWhatShutdown, this);
msg->setInt32("keepComponentAllocated", keepComponentAllocated);
msg->post();
if (!keepComponentAllocated) {
// ensure shutdown completes in 3 seconds
- (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000);
+ (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
}
}
void ACodec::signalRequestIDRFrame() {
- (new AMessage(kWhatRequestIDRFrame, id()))->post();
+ (new AMessage(kWhatRequestIDRFrame, this))->post();
}
// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
@@ -516,7 +517,7 @@ void ACodec::signalRequestIDRFrame() {
void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
mMetaDataBuffersToSubmit > 0) {
- (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post();
+ (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, this))->post();
}
}
@@ -1259,6 +1260,10 @@ status_t ACodec::configureCodec(
mMaxPtsGapUs = -1ll;
}
+ if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
+ mMaxFps = -1;
+ }
+
if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
mTimePerCaptureUs = -1ll;
}
@@ -1675,6 +1680,21 @@ status_t ACodec::configureCodec(
err = setMinBufferSize(kPortIndexInput, 8192); // XXX
}
+ int32_t priority;
+ if (msg->findInt32("priority", &priority)) {
+ err = setPriority(priority);
+ }
+
+ int32_t rateInt = -1;
+ float rateFloat = -1;
+ if (!msg->findFloat("operating-rate", &rateFloat)) {
+ msg->findInt32("operating-rate", &rateInt);
+ rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
+ }
+ if (rateFloat > 0) {
+ err = setOperatingRate(rateFloat, video);
+ }
+
mBaseOutputFormat = outputFormat;
CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
@@ -1685,6 +1705,50 @@ status_t ACodec::configureCodec(
return err;
}
+status_t ACodec::setPriority(int32_t priority) {
+ if (priority < 0) {
+ return BAD_VALUE;
+ }
+ OMX_PARAM_U32TYPE config;
+ InitOMXParams(&config);
+ config.nU32 = (OMX_U32)priority;
+ status_t temp = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
+ &config, sizeof(config));
+ if (temp != OK) {
+ ALOGI("codec does not support config priority (err %d)", temp);
+ }
+ return OK;
+}
+
+status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
+ if (rateFloat < 0) {
+ return BAD_VALUE;
+ }
+ OMX_U32 rate;
+ if (isVideo) {
+ if (rateFloat > 65535) {
+ return BAD_VALUE;
+ }
+ rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
+ } else {
+ if (rateFloat > UINT_MAX) {
+ return BAD_VALUE;
+ }
+ rate = (OMX_U32)(rateFloat);
+ }
+ OMX_PARAM_U32TYPE config;
+ InitOMXParams(&config);
+ config.nU32 = rate;
+ status_t err = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
+ &config, sizeof(config));
+ if (err != OK) {
+ ALOGI("codec does not support config operating rate (err %d)", err);
+ }
+ return OK;
+}
+
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -4297,7 +4361,7 @@ void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
info->mData->meta()->clear();
notify->setBuffer("buffer", info->mData);
- sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
reply->setInt32("buffer-id", info->mBufferID);
notify->setMessage("reply", reply);
@@ -4557,7 +4621,7 @@ bool ACodec::BaseState::onOMXFillBufferDone(
}
sp<AMessage> reply =
- new AMessage(kWhatOutputBufferDrained, mCodec->id());
+ new AMessage(kWhatOutputBufferDrained, mCodec);
if (!mCodec->mSentFormat && rangeLength > 0) {
mCodec->sendFormatChange(reply);
@@ -4833,7 +4897,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
sp<IOMX> omx = client.interface();
- sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id());
+ sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);
mDeathNotifier = new DeathNotifier(notify);
if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) {
@@ -4876,6 +4940,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
sp<CodecObserver> observer = new CodecObserver;
IOMX::node_id node = NULL;
+ status_t err = OMX_ErrorComponentNotFound;
for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
++matchIndex) {
componentName = matchingCodecs.itemAt(matchIndex).mName.string();
@@ -4884,7 +4949,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
pid_t tid = gettid();
int prevPriority = androidGetThreadPriority(tid);
androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
- status_t err = omx->allocateNode(componentName.c_str(), observer, &node);
+ err = omx->allocateNode(componentName.c_str(), observer, &node);
androidSetThreadPriority(tid, prevPriority);
if (err == OK) {
@@ -4898,17 +4963,17 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
if (node == NULL) {
if (!mime.empty()) {
- ALOGE("Unable to instantiate a %scoder for type '%s'.",
- encoder ? "en" : "de", mime.c_str());
+ ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
+ encoder ? "en" : "de", mime.c_str(), err);
} else {
- ALOGE("Unable to instantiate codec '%s'.", componentName.c_str());
+ ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
}
- mCodec->signalError(OMX_ErrorComponentNotFound);
+ mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
return false;
}
- notify = new AMessage(kWhatOMXMessage, mCodec->id());
+ notify = new AMessage(kWhatOMXMessage, mCodec);
observer->setNotificationMessage(notify);
mCodec->mComponentName = componentName;
@@ -5114,6 +5179,21 @@ void ACodec::LoadedState::onCreateInputSurface(
}
}
+ if (err == OK && mCodec->mMaxFps > 0) {
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_MAX_FPS,
+ &mCodec->mMaxFps,
+ sizeof(mCodec->mMaxFps));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure max fps (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ }
+ }
+
if (err == OK && mCodec->mTimePerCaptureUs > 0ll
&& mCodec->mTimePerFrameUs > 0ll) {
int64_t timeLapse[2];
@@ -5984,7 +6064,7 @@ bool ACodec::FlushingState::onOMXEvent(
case OMX_EventPortSettingsChanged:
{
- sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id());
+ sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
msg->setInt32("type", omx_message::EVENT);
msg->setInt32("node", mCodec->mNode);
msg->setInt32("event", event);
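
The new ACodec::setOperatingRate() above encodes video rates as Q16 fixed-point
(rate * 65536, rounded) and audio rates as plain integers before passing them to
OMX_IndexConfigOperatingRate. A minimal standalone sketch of that conversion,
assuming uint32_t in place of OMX_U32; the helper name and main() are
illustrative only:

    #include <cstdint>
    #include <cstdio>
    #include <climits>

    // Mirrors the bounds checks and Q16 conversion in the hunk above.
    static bool encodeOperatingRate(float rateFloat, bool isVideo, uint32_t *out) {
        if (rateFloat < 0) {
            return false;                                    // BAD_VALUE in the real code
        }
        if (isVideo) {
            if (rateFloat > 65535) {                         // Q16 upper bound
                return false;
            }
            *out = (uint32_t)(rateFloat * 65536.0f + 0.5f);  // round to nearest Q16
        } else {
            if (rateFloat > (float)UINT_MAX) {
                return false;
            }
            *out = (uint32_t)rateFloat;                      // audio rates stay integral
        }
        return true;
    }

    int main() {
        uint32_t q16 = 0;
        encodeOperatingRate(30.0f, true /* video */, &q16);
        printf("30 fps -> 0x%08x (Q16)\n", q16);             // prints 0x001e0000
        return 0;
    }

Rounding with +0.5f before truncation matches the hunk; 30 fps therefore becomes
0x001e0000, and video rates above 65535 fps are rejected as BAD_VALUE.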
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index 9aa7d95..f53d7f0 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -31,19 +31,6 @@
namespace android {
-AMRWriter::AMRWriter(const char *filename)
- : mFd(-1),
- mInitCheck(NO_INIT),
- mStarted(false),
- mPaused(false),
- mResumed(false) {
-
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- mInitCheck = OK;
- }
-}
-
AMRWriter::AMRWriter(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 2629afc..b0eeb7f 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -31,11 +31,14 @@ LOCAL_SRC_FILES:= \
MediaAdapter.cpp \
MediaBuffer.cpp \
MediaBufferGroup.cpp \
+ MediaClock.cpp \
MediaCodec.cpp \
MediaCodecList.cpp \
+ MediaCodecListOverrides.cpp \
MediaCodecSource.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
+ MediaSync.cpp \
MidiExtractor.cpp \
http/MediaHTTP.cpp \
MediaMuxer.cpp \
@@ -46,6 +49,7 @@ LOCAL_SRC_FILES:= \
OMXClient.cpp \
OMXCodec.cpp \
OggExtractor.cpp \
+ ProcessInfo.cpp \
SampleIterator.cpp \
SampleTable.cpp \
SkipCutBuffer.cpp \
@@ -101,6 +105,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_color_conversion \
libstagefright_aacenc \
libstagefright_matroska \
+ libstagefright_mediafilter \
libstagefright_webm \
libstagefright_timedtext \
libvpx \
@@ -108,13 +113,14 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_mpeg2ts \
libstagefright_id3 \
libFLAC \
- libmedia_helper
+ libmedia_helper \
LOCAL_SHARED_LIBRARIES += \
libstagefright_enc_common \
libstagefright_avc_common \
libstagefright_foundation \
- libdl
+ libdl \
+ libRScpp \
LOCAL_CFLAGS += -Wno-multichar
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index a7ca3da..f0db76b 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FileSource"
+#include <utils/Log.h>
+
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/FileSource.h>
#include <sys/types.h>
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 0c2ff15..77a652a 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -75,7 +75,11 @@ void HTTPBase::addBandwidthMeasurement(
bool HTTPBase::estimateBandwidth(int32_t *bandwidth_bps) {
Mutex::Autolock autoLock(mLock);
- if (mNumBandwidthHistoryItems < 2) {
+ // Do not do bandwidth estimation if we don't have enough samples, or
+    // total bytes downloaded are too small (<64K).
+    // Bandwidth estimation from these samples can often shoot up and cause
+    // unwanted bandwidth adaptation behaviors.
+ if (mNumBandwidthHistoryItems < 2 || mTotalTransferBytes < 65536) {
return false;
}
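
Only the guard condition above (fewer than 2 samples or fewer than 64 KiB
transferred) is taken from this hunk; the history bookkeeping and the
bits-per-second formula in the sketch below are assumptions added to illustrate
how such an estimate is typically derived:

    #include <cstdint>
    #include <cstddef>

    struct BandwidthHistory {
        size_t  numItems;      // corresponds to mNumBandwidthHistoryItems
        int64_t totalBytes;    // corresponds to mTotalTransferBytes
        int64_t totalTimeUs;   // hypothetical aggregate transfer time
    };

    static bool estimateBandwidth(const BandwidthHistory &h, int32_t *bandwidthBps) {
        // Too few samples or too little data (<64K): estimates from such samples
        // tend to shoot up and trigger unwanted bandwidth adaptation.
        if (h.numItems < 2 || h.totalBytes < 65536) {
            return false;
        }
        if (h.totalTimeUs <= 0) {
            return false;
        }
        // bytes * 8 bits, scaled from microseconds to seconds.
        *bandwidthBps = (int32_t)((h.totalBytes * 8000000ll) / h.totalTimeUs);
        return true;
    }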
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 9856f92..ef07aa0 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -135,7 +135,7 @@ void MPEG2TSWriter::SourceInfo::start(const sp<AMessage> &notify) {
mNotify = notify;
- (new AMessage(kWhatStart, id()))->post();
+ (new AMessage(kWhatStart, this))->post();
}
void MPEG2TSWriter::SourceInfo::stop() {
@@ -361,7 +361,7 @@ bool MPEG2TSWriter::SourceInfo::flushAACFrames() {
}
void MPEG2TSWriter::SourceInfo::readMore() {
- (new AMessage(kWhatRead, id()))->post();
+ (new AMessage(kWhatRead, this))->post();
}
void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) {
@@ -480,19 +480,6 @@ MPEG2TSWriter::MPEG2TSWriter(int fd)
init();
}
-MPEG2TSWriter::MPEG2TSWriter(const char *filename)
- : mFile(fopen(filename, "wb")),
- mWriteCookie(NULL),
- mWriteFunc(NULL),
- mStarted(false),
- mNumSourcesDone(0),
- mNumTSPacketsWritten(0),
- mNumTSPacketsBeforeMeta(0),
- mPATContinuityCounter(0),
- mPMTContinuityCounter(0) {
- init();
-}
-
MPEG2TSWriter::MPEG2TSWriter(
void *cookie,
ssize_t (*write)(void *cookie, const void *data, size_t size))
@@ -565,7 +552,7 @@ status_t MPEG2TSWriter::start(MetaData * /* param */) {
for (size_t i = 0; i < mSources.size(); ++i) {
sp<AMessage> notify =
- new AMessage(kWhatSourceNotify, mReflector->id());
+ new AMessage(kWhatSourceNotify, mReflector);
notify->setInt32("source-index", i);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 3dd8b11..28a9ed9 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -354,6 +354,8 @@ static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t
MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)
: mMoofOffset(0),
+ mMoofFound(false),
+ mMdatFound(false),
mDataSource(source),
mInitCheck(NO_INIT),
mHasVideo(false),
@@ -490,7 +492,9 @@ status_t MPEG4Extractor::readMetaData() {
off64_t offset = 0;
status_t err;
- while (true) {
+ bool sawMoovOrSidx = false;
+
+ while (!(sawMoovOrSidx && (mMdatFound || mMoofFound))) {
off64_t orig_offset = offset;
err = parseChunk(&offset, 0);
@@ -502,23 +506,9 @@ status_t MPEG4Extractor::readMetaData() {
ALOGE("did not advance: 0x%lld->0x%lld", orig_offset, offset);
err = ERROR_MALFORMED;
break;
- } else if (err == OK) {
- continue;
- }
-
- uint32_t hdr[2];
- if (mDataSource->readAt(offset, hdr, 8) < 8) {
- break;
+ } else if (err == UNKNOWN_ERROR) {
+ sawMoovOrSidx = true;
}
- uint32_t chunk_type = ntohl(hdr[1]);
- if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
- // store the offset of the first segment
- mMoofOffset = offset;
- } else if (chunk_type != FOURCC('m', 'd', 'a', 't')) {
- // keep parsing until we get to the data
- continue;
- }
- break;
}
if (mInitCheck == OK) {
@@ -749,6 +739,17 @@ static bool underMetaDataPath(const Vector<uint32_t> &path) {
&& path[3] == FOURCC('i', 'l', 's', 't');
}
+static bool underQTMetaPath(const Vector<uint32_t> &path, int32_t depth) {
+ return path.size() >= 2
+ && path[0] == FOURCC('m', 'o', 'o', 'v')
+ && path[1] == FOURCC('m', 'e', 't', 'a')
+ && (depth == 2
+ || (depth == 3
+ && (path[2] == FOURCC('h', 'd', 'l', 'r')
+ || path[2] == FOURCC('i', 'l', 's', 't')
+ || path[2] == FOURCC('k', 'e', 'y', 's'))));
+}
+
// Given a time in seconds since Jan 1 1904, produce a human-readable string.
static void convertTimeToDate(int64_t time_1904, String8 *s) {
time_t time_1970 = time_1904 - (((66 * 365 + 17) * 24) * 3600);
@@ -864,6 +865,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 'c', 'h', 'i'):
case FOURCC('e', 'd', 't', 's'):
{
+ if (chunk_type == FOURCC('m', 'o', 'o', 'f') && !mMoofFound) {
+ // store the offset of the first segment
+ mMoofFound = true;
+ mMoofOffset = *offset;
+ }
+
if (chunk_type == FOURCC('s', 't', 'b', 'l')) {
ALOGV("sampleTable chunk is %" PRIu64 " bytes long.", chunk_size);
@@ -878,6 +885,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->sampleTable = new SampleTable(mDataSource);
}
@@ -1032,6 +1042,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
original_fourcc = ntohl(original_fourcc);
ALOGV("read original format: %d", original_fourcc);
+
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc));
uint32_t num_channels = 0;
uint32_t sample_rate = 0;
@@ -1087,6 +1101,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId);
mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize);
mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16);
@@ -1202,7 +1219,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
duration = ntohl(duration32);
}
}
- if (duration != 0) {
+ if (duration != 0 && mLastTrack->timescale != 0) {
mLastTrack->meta->setInt64(
kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
}
@@ -1266,6 +1283,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// display the timed text.
// For encrypted files, there may also be more than one entry.
const char *mime;
+
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));
if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) &&
strcasecmp(mime, "application/octet-stream")) {
@@ -1312,6 +1333,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
uint16_t sample_size = U16_AT(&buffer[18]);
uint32_t sample_rate = U32_AT(&buffer[24]) >> 16;
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
if (chunk_type != FOURCC('e', 'n', 'c', 'a')) {
// if the chunk type is enca, we'll get the type from the sinf/frma box later
mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
@@ -1373,6 +1397,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// printf("*** coding='%s' width=%d height=%d\n",
// chunk, width, height);
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
if (chunk_type != FOURCC('e', 'n', 'c', 'v')) {
// if the chunk type is encv, we'll get the type from the sinf/frma box later
mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
@@ -1398,6 +1425,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 'c', 'o'):
case FOURCC('c', 'o', '6', '4'):
{
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ return ERROR_MALFORMED;
+
status_t err =
mLastTrack->sampleTable->setChunkOffsetParams(
chunk_type, data_offset, chunk_data_size);
@@ -1413,6 +1443,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 's', 'c'):
{
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ return ERROR_MALFORMED;
+
status_t err =
mLastTrack->sampleTable->setSampleToChunkParams(
data_offset, chunk_data_size);
@@ -1429,6 +1462,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 's', 'z'):
case FOURCC('s', 't', 'z', '2'):
{
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ return ERROR_MALFORMED;
+
status_t err =
mLastTrack->sampleTable->setSampleSizeParams(
chunk_type, data_offset, chunk_data_size);
@@ -1498,6 +1534,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 't', 's'):
{
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ return ERROR_MALFORMED;
+
*offset += chunk_size;
status_t err =
@@ -1513,6 +1552,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('c', 't', 't', 's'):
{
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ return ERROR_MALFORMED;
+
*offset += chunk_size;
status_t err =
@@ -1528,6 +1570,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 's', 's'):
{
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ return ERROR_MALFORMED;
+
*offset += chunk_size;
status_t err =
@@ -1600,6 +1645,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_MALFORMED;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setData(
kKeyESDS, kTypeESDS, &buffer[4], chunk_data_size - 4);
@@ -1632,6 +1680,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setData(
kKeyAVCC, kTypeAVCC, buffer->data(), chunk_data_size);
@@ -1646,6 +1697,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setData(
kKeyHVCC, kTypeHVCC, buffer->data(), chunk_data_size);
@@ -1679,6 +1733,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setData(kKeyD263, kTypeD263, buffer, chunk_data_size);
break;
@@ -1686,31 +1743,35 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('m', 'e', 't', 'a'):
{
- uint8_t buffer[4];
- if (chunk_data_size < (off64_t)sizeof(buffer)) {
- *offset += chunk_size;
- return ERROR_MALFORMED;
- }
+ off64_t stop_offset = *offset + chunk_size;
+ *offset = data_offset;
+ bool isParsingMetaKeys = underQTMetaPath(mPath, 2);
+ if (!isParsingMetaKeys) {
+ uint8_t buffer[4];
+ if (chunk_data_size < (off64_t)sizeof(buffer)) {
+ *offset += chunk_size;
+ return ERROR_MALFORMED;
+ }
- if (mDataSource->readAt(
- data_offset, buffer, 4) < 4) {
- *offset += chunk_size;
- return ERROR_IO;
- }
+ if (mDataSource->readAt(
+ data_offset, buffer, 4) < 4) {
+ *offset += chunk_size;
+ return ERROR_IO;
+ }
- if (U32_AT(buffer) != 0) {
- // Should be version 0, flags 0.
+ if (U32_AT(buffer) != 0) {
+ // Should be version 0, flags 0.
- // If it's not, let's assume this is one of those
- // apparently malformed chunks that don't have flags
- // and completely different semantics than what's
- // in the MPEG4 specs and skip it.
- *offset += chunk_size;
- return OK;
+ // If it's not, let's assume this is one of those
+ // apparently malformed chunks that don't have flags
+ // and completely different semantics than what's
+ // in the MPEG4 specs and skip it.
+ *offset += chunk_size;
+ return OK;
+ }
+ *offset += sizeof(buffer);
}
- off64_t stop_offset = *offset + chunk_size;
- *offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
if (err != OK) {
@@ -1776,7 +1837,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
duration = d32;
}
- if (duration != 0) {
+ if (duration != 0 && mHeaderTimescale != 0) {
mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);
}
@@ -1825,7 +1886,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_MALFORMED;
}
- if (duration != 0) {
+ if (duration != 0 && mHeaderTimescale != 0) {
mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);
}
@@ -1835,6 +1896,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('m', 'd', 'a', 't'):
{
ALOGV("mdat chunk, drm: %d", mIsDrm);
+
+ mMdatFound = true;
+
if (!mIsDrm) {
*offset += chunk_size;
break;
@@ -1851,12 +1915,19 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
{
*offset += chunk_size;
+ if (underQTMetaPath(mPath, 3)) {
+ break;
+ }
+
uint32_t buffer;
if (mDataSource->readAt(
data_offset + 8, &buffer, 4) < 4) {
return ERROR_IO;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
uint32_t type = ntohl(buffer);
// For the 3GPP file format, the handler-type within the 'hdlr' box
// shall be 'text'. We also want to support 'sbtl' handler type
@@ -1868,6 +1939,16 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
break;
}
+ case FOURCC('k', 'e', 'y', 's'):
+ {
+ *offset += chunk_size;
+
+ if (underQTMetaPath(mPath, 3)) {
+ parseQTMetaKey(data_offset, chunk_data_size);
+ }
+ break;
+ }
+
case FOURCC('t', 'r', 'e', 'x'):
{
*offset += chunk_size;
@@ -1889,6 +1970,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('t', 'x', '3', 'g'):
{
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
uint32_t type;
const void *data;
size_t size = 0;
@@ -1995,6 +2079,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
default:
{
+ // check if we're parsing 'ilst' for meta keys
+ // if so, treat type as a number (key-id).
+ if (underQTMetaPath(mPath, 3)) {
+ parseQTMetaVal(chunk_type, data_offset, chunk_data_size);
+ }
+
*offset += chunk_size;
break;
}
@@ -2030,6 +2120,8 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
return ERROR_MALFORMED;
}
ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale);
+ if (timeScale == 0)
+ return ERROR_MALFORMED;
uint64_t earliestPresentationTime;
uint64_t firstOffset;
@@ -2113,6 +2205,9 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
uint64_t sidxDuration = total_duration * 1000000 / timeScale;
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
int64_t metaDuration;
if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) {
mLastTrack->meta->setInt64(kKeyDuration, sidxDuration);
@@ -2120,7 +2215,108 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
return OK;
}
+status_t MPEG4Extractor::parseQTMetaKey(off64_t offset, size_t size) {
+ if (size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t count;
+ if (!mDataSource->getUInt32(offset + 4, &count)) {
+ return ERROR_MALFORMED;
+ }
+
+ if (mMetaKeyMap.size() > 0) {
+ ALOGW("'keys' atom seen again, discarding existing entries");
+ mMetaKeyMap.clear();
+ }
+
+ off64_t keyOffset = offset + 8;
+ off64_t stopOffset = offset + size;
+ for (size_t i = 1; i <= count; i++) {
+ if (keyOffset + 8 > stopOffset) {
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t keySize;
+ if (!mDataSource->getUInt32(keyOffset, &keySize)
+ || keySize < 8
+ || keyOffset + keySize > stopOffset) {
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t type;
+ if (!mDataSource->getUInt32(keyOffset + 4, &type)
+ || type != FOURCC('m', 'd', 't', 'a')) {
+ return ERROR_MALFORMED;
+ }
+
+ keySize -= 8;
+ keyOffset += 8;
+
+ sp<ABuffer> keyData = new ABuffer(keySize);
+ if (keyData->data() == NULL) {
+ return ERROR_MALFORMED;
+ }
+ if (mDataSource->readAt(
+ keyOffset, keyData->data(), keySize) < (ssize_t) keySize) {
+ return ERROR_MALFORMED;
+ }
+
+ AString key((const char *)keyData->data(), keySize);
+ mMetaKeyMap.add(i, key);
+
+ keyOffset += keySize;
+ }
+ return OK;
+}
+
+status_t MPEG4Extractor::parseQTMetaVal(
+ int32_t keyId, off64_t offset, size_t size) {
+ ssize_t index = mMetaKeyMap.indexOfKey(keyId);
+ if (index < 0) {
+ // corresponding key is not present, ignore
+ return ERROR_MALFORMED;
+ }
+
+ if (size <= 16) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t dataSize;
+ if (!mDataSource->getUInt32(offset, &dataSize)
+ || dataSize > size || dataSize <= 16) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t atomFourCC;
+ if (!mDataSource->getUInt32(offset + 4, &atomFourCC)
+ || atomFourCC != FOURCC('d', 'a', 't', 'a')) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t dataType;
+ if (!mDataSource->getUInt32(offset + 8, &dataType)
+ || ((dataType & 0xff000000) != 0)) {
+ // not well-known type
+ return ERROR_MALFORMED;
+ }
+ dataSize -= 16;
+ offset += 16;
+
+ if (dataType == 23 && dataSize >= 4) {
+ // BE Float32
+ uint32_t val;
+ if (!mDataSource->getUInt32(offset, &val)) {
+ return ERROR_MALFORMED;
+ }
+ if (!strcasecmp(mMetaKeyMap[index].c_str(), "com.android.capture.fps")) {
+ mFileMetaData->setFloat(kKeyCaptureFramerate, *(float *)&val);
+ }
+ } else {
+ // add more keys if needed
+ ALOGV("ignoring key: type %d, size %d", dataType, dataSize);
+ }
+
+ return OK;
+}
status_t MPEG4Extractor::parseTrackHeader(
off64_t data_offset, off64_t data_size) {
@@ -2163,6 +2359,9 @@ status_t MPEG4Extractor::parseTrackHeader(
return ERROR_UNSUPPORTED;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setInt32(kKeyTrackID, id);
size_t matrixOffset = dynSize + 16;
@@ -2345,6 +2544,9 @@ status_t MPEG4Extractor::parseITunesMetaData(off64_t offset, size_t size) {
int32_t delay, padding;
if (sscanf(mLastCommentData,
" %*x %x %x %*x", &delay, &padding) == 2) {
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setInt32(kKeyEncoderDelay, delay);
mLastTrack->meta->setInt32(kKeyEncoderPadding, padding);
}
@@ -2712,6 +2914,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
if (objectTypeIndication == 0xe1) {
// This isn't MPEG4 audio at all, it's QCELP 14k...
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_QCELP);
return OK;
}
@@ -2760,6 +2965,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
objectType = 32 + br.getBits(6);
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
//keep AOT type
mLastTrack->meta->setInt32(kKeyAACAOT, objectType);
@@ -2930,6 +3138,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
return ERROR_UNSUPPORTED;
}
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
int32_t prevSampleRate;
CHECK(mLastTrack->meta->findInt32(kKeySampleRate, &prevSampleRate));
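
The new MPEG4Extractor::parseQTMetaVal() above walks a QuickTime keyed-metadata
value atom laid out as [size]['data'][type][locale][payload], where type 23 is a
big-endian float32 (used for com.android.capture.fps). A standalone sketch of
that layout on an in-memory buffer; the function and lambda names are invented
for illustration, and the real code reads through DataSource::getUInt32():

    #include <cstdint>
    #include <cstring>

    static bool parseDataAtomFloat(const uint8_t *atom, size_t size, float *out) {
        if (size <= 16) {
            return false;                          // the header alone is 16 bytes
        }
        auto be32 = [](const uint8_t *p) -> uint32_t {
            return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16)
                 | ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
        };
        uint32_t dataSize = be32(atom);            // full atom size, including header
        if (dataSize > size || dataSize <= 16) {
            return false;
        }
        if (memcmp(atom + 4, "data", 4) != 0) {    // atom fourcc must be 'data'
            return false;
        }
        uint32_t dataType = be32(atom + 8);        // well-known type id
        if (dataType != 23 || dataSize - 16 < 4) { // 23 == big-endian float32
            return false;
        }
        // atom + 12 holds country/language, which the parser ignores.
        uint32_t bits = be32(atom + 16);           // payload starts at offset 16
        memcpy(out, &bits, sizeof(*out));          // reinterpret bits as IEEE float
        return true;
    }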
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 844a019..3d83e83 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -29,6 +29,7 @@
#include <utils/Log.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MetaData.h>
@@ -62,6 +63,14 @@ static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
static const uint8_t kNalUnitTypePicParamSet = 0x08;
static const int64_t kInitialDelayTimeUs = 700000LL;
+static const char kMetaKey_Model[] = "com.android.model";
+static const char kMetaKey_Version[] = "com.android.version";
+static const char kMetaKey_Build[] = "com.android.build";
+static const char kMetaKey_CaptureFps[] = "com.android.capture.fps";
+
+/* uncomment to include model and build in meta */
+//#define SHOW_MODEL_BUILD 1
+
class MPEG4Writer::Track {
public:
Track(MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId);
@@ -345,31 +354,6 @@ private:
Track &operator=(const Track &);
};
-MPEG4Writer::MPEG4Writer(const char *filename)
- : mFd(-1),
- mInitCheck(NO_INIT),
- mIsRealTimeRecording(true),
- mUse4ByteNalLength(true),
- mUse32BitOffset(true),
- mIsFileSizeLimitExplicitlyRequested(false),
- mPaused(false),
- mStarted(false),
- mWriterThreadStarted(false),
- mOffset(0),
- mMdatOffset(0),
- mEstimatedMoovBoxSize(0),
- mInterleaveDurationUs(1000000),
- mLatitudex10000(0),
- mLongitudex10000(0),
- mAreGeoTagsAvailable(false),
- mStartTimeOffsetMs(-1) {
-
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- mInitCheck = OK;
- }
-}
-
MPEG4Writer::MPEG4Writer(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
@@ -383,11 +367,14 @@ MPEG4Writer::MPEG4Writer(int fd)
mOffset(0),
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
+ mMoovExtraSize(0),
mInterleaveDurationUs(1000000),
mLatitudex10000(0),
mLongitudex10000(0),
mAreGeoTagsAvailable(false),
+ mMetaKeys(new AMessage()),
mStartTimeOffsetMs(-1) {
+ addDeviceMeta();
}
MPEG4Writer::~MPEG4Writer() {
@@ -507,6 +494,34 @@ status_t MPEG4Writer::startTracks(MetaData *params) {
return OK;
}
+void MPEG4Writer::addDeviceMeta() {
+ // add device info and estimate space in 'moov'
+ char val[PROPERTY_VALUE_MAX];
+ size_t n;
+ // meta size is estimated by adding up the following:
+ // - meta header structures, which occur only once (total 66 bytes)
+ // - size for each key, which consists of a fixed header (32 bytes),
+ // plus key length and data length.
+ mMoovExtraSize += 66;
+ if (property_get("ro.build.version.release", val, NULL)
+ && (n = strlen(val)) > 0) {
+ mMetaKeys->setString(kMetaKey_Version, val, n + 1);
+ mMoovExtraSize += sizeof(kMetaKey_Version) + n + 32;
+ }
+#ifdef SHOW_MODEL_BUILD
+ if (property_get("ro.product.model", val, NULL)
+ && (n = strlen(val)) > 0) {
+ mMetaKeys->setString(kMetaKey_Model, val, n + 1);
+ mMoovExtraSize += sizeof(kMetaKey_Model) + n + 32;
+ }
+ if (property_get("ro.build.display.id", val, NULL)
+ && (n = strlen(val)) > 0) {
+ mMetaKeys->setString(kMetaKey_Build, val, n + 1);
+ mMoovExtraSize += sizeof(kMetaKey_Build) + n + 32;
+ }
+#endif
+}
+
int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
// This implementation is highly experimental/heuristic.
//
@@ -560,6 +575,9 @@ int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
size = MAX_MOOV_BOX_SIZE;
}
+ // Account for the extra stuff (Geo, meta keys, etc.)
+ size += mMoovExtraSize;
+
ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the"
" estimated moov size %" PRId64 " bytes",
mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
@@ -973,6 +991,7 @@ void MPEG4Writer::writeMoovBox(int64_t durationUs) {
if (mAreGeoTagsAvailable) {
writeUdtaBox();
}
+ writeMetaBox();
int32_t id = 1;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it, ++id) {
@@ -1142,6 +1161,14 @@ size_t MPEG4Writer::write(
return bytes;
}
+void MPEG4Writer::beginBox(uint32_t id) {
+ mBoxes.push_back(mWriteMoovBoxToMemory?
+ mMoovBoxBufferOffset: mOffset);
+
+ writeInt32(0);
+ writeInt32(id);
+}
+
void MPEG4Writer::beginBox(const char *fourcc) {
CHECK_EQ(strlen(fourcc), 4);
@@ -1266,6 +1293,18 @@ status_t MPEG4Writer::setGeoData(int latitudex10000, int longitudex10000) {
mLatitudex10000 = latitudex10000;
mLongitudex10000 = longitudex10000;
mAreGeoTagsAvailable = true;
+ mMoovExtraSize += 30;
+ return OK;
+}
+
+status_t MPEG4Writer::setCaptureRate(float captureFps) {
+ if (captureFps <= 0.0f) {
+ return BAD_VALUE;
+ }
+
+ mMetaKeys->setFloat(kMetaKey_CaptureFps, captureFps);
+ mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32;
+
return OK;
}
@@ -3099,6 +3138,103 @@ void MPEG4Writer::writeUdtaBox() {
endBox();
}
+void MPEG4Writer::writeHdlr() {
+ beginBox("hdlr");
+ writeInt32(0); // Version, Flags
+ writeInt32(0); // Predefined
+ writeFourcc("mdta");
+ writeInt32(0); // Reserved[0]
+ writeInt32(0); // Reserved[1]
+ writeInt32(0); // Reserved[2]
+ writeInt8(0); // Name (empty)
+ endBox();
+}
+
+void MPEG4Writer::writeKeys() {
+ size_t count = mMetaKeys->countEntries();
+
+ beginBox("keys");
+ writeInt32(0); // Version, Flags
+ writeInt32(count); // Entry_count
+ for (size_t i = 0; i < count; i++) {
+ AMessage::Type type;
+ const char *key = mMetaKeys->getEntryNameAt(i, &type);
+ size_t n = strlen(key);
+ writeInt32(n + 8);
+ writeFourcc("mdta");
+ write(key, n); // write without the \0
+ }
+ endBox();
+}
+
+void MPEG4Writer::writeIlst() {
+ size_t count = mMetaKeys->countEntries();
+
+ beginBox("ilst");
+ for (size_t i = 0; i < count; i++) {
+ beginBox(i + 1); // key id (1-based)
+ beginBox("data");
+ AMessage::Type type;
+ const char *key = mMetaKeys->getEntryNameAt(i, &type);
+ switch (type) {
+ case AMessage::kTypeString:
+ {
+ AString val;
+ CHECK(mMetaKeys->findString(key, &val));
+ writeInt32(1); // type = UTF8
+ writeInt32(0); // default country/language
+ write(val.c_str(), strlen(val.c_str())); // write without \0
+ break;
+ }
+
+ case AMessage::kTypeFloat:
+ {
+ float val;
+ CHECK(mMetaKeys->findFloat(key, &val));
+ writeInt32(23); // type = float32
+ writeInt32(0); // default country/language
+ writeInt32(*reinterpret_cast<int32_t *>(&val));
+ break;
+ }
+
+ case AMessage::kTypeInt32:
+ {
+ int32_t val;
+ CHECK(mMetaKeys->findInt32(key, &val));
+ writeInt32(67); // type = signed int32
+ writeInt32(0); // default country/language
+ writeInt32(val);
+ break;
+ }
+
+ default:
+ {
+ ALOGW("Unsupported key type, writing 0 instead");
+ writeInt32(77); // type = unsigned int32
+ writeInt32(0); // default country/language
+ writeInt32(0);
+ break;
+ }
+ }
+ endBox(); // data
+ endBox(); // key id
+ }
+ endBox(); // ilst
+}
+
+void MPEG4Writer::writeMetaBox() {
+ size_t count = mMetaKeys->countEntries();
+ if (count == 0) {
+ return;
+ }
+
+ beginBox("meta");
+ writeHdlr();
+ writeKeys();
+ writeIlst();
+ endBox();
+}
+
/*
* Geodata is stored according to ISO-6709 standard.
*/
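
The mMoovExtraSize bookkeeping introduced above budgets a one-time 66-byte
allowance for the meta/hdlr/keys/ilst headers plus, per keyed entry, a fixed
32-byte header, the key string (with its terminating NUL), and the value bytes.
A small sketch of that arithmetic, using the figures from the comments in
addDeviceMeta() and setCaptureRate(); the helper below is illustrative and not
part of MPEG4Writer:

    #include <cstddef>
    #include <cstring>
    #include <cstdio>

    // Per-entry estimate: key (including '\0') + value bytes + fixed 32-byte header.
    static size_t metaEntryExtraSize(const char *key, size_t valueSize) {
        return strlen(key) + 1 + valueSize + 32;
    }

    int main() {
        size_t moovExtra = 66;                                          // one-time headers
        moovExtra += metaEntryExtraSize("com.android.version", 5);      // e.g. "5.1.1"
        moovExtra += metaEntryExtraSize("com.android.capture.fps", 4);  // float32 payload
        printf("estimated extra moov bytes: %zu\n", moovExtra);         // 183
        return 0;
    }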
diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp
new file mode 100644
index 0000000..2641e4e
--- /dev/null
+++ b/media/libstagefright/MediaClock.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaClock"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaClock.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+
+namespace android {
+
+MediaClock::MediaClock()
+ : mAnchorTimeMediaUs(-1),
+ mAnchorTimeRealUs(-1),
+ mMaxTimeMediaUs(INT64_MAX),
+ mStartingTimeMediaUs(-1),
+ mPlaybackRate(1.0) {
+}
+
+MediaClock::~MediaClock() {
+}
+
+void MediaClock::setStartingTimeMedia(int64_t startingTimeMediaUs) {
+ Mutex::Autolock autoLock(mLock);
+ mStartingTimeMediaUs = startingTimeMediaUs;
+}
+
+void MediaClock::clearAnchor() {
+ Mutex::Autolock autoLock(mLock);
+ mAnchorTimeMediaUs = -1;
+ mAnchorTimeRealUs = -1;
+}
+
+void MediaClock::updateAnchor(
+ int64_t anchorTimeMediaUs,
+ int64_t anchorTimeRealUs,
+ int64_t maxTimeMediaUs) {
+ if (anchorTimeMediaUs < 0 || anchorTimeRealUs < 0) {
+ ALOGW("reject anchor time since it is negative.");
+ return;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t nowMediaUs =
+ anchorTimeMediaUs + (nowUs - anchorTimeRealUs) * (double)mPlaybackRate;
+ if (nowMediaUs < 0) {
+ ALOGW("reject anchor time since it leads to negative media time.");
+ return;
+ }
+ mAnchorTimeRealUs = nowUs;
+ mAnchorTimeMediaUs = nowMediaUs;
+ mMaxTimeMediaUs = maxTimeMediaUs;
+}
+
+void MediaClock::updateMaxTimeMedia(int64_t maxTimeMediaUs) {
+ Mutex::Autolock autoLock(mLock);
+ mMaxTimeMediaUs = maxTimeMediaUs;
+}
+
+void MediaClock::setPlaybackRate(float rate) {
+ CHECK_GE(rate, 0.0);
+ Mutex::Autolock autoLock(mLock);
+ if (mAnchorTimeRealUs == -1) {
+ mPlaybackRate = rate;
+ return;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+ mAnchorTimeMediaUs += (nowUs - mAnchorTimeRealUs) * (double)mPlaybackRate;
+ if (mAnchorTimeMediaUs < 0) {
+ ALOGW("setRate: anchor time should not be negative, set to 0.");
+ mAnchorTimeMediaUs = 0;
+ }
+ mAnchorTimeRealUs = nowUs;
+ mPlaybackRate = rate;
+}
+
+float MediaClock::getPlaybackRate() const {
+ Mutex::Autolock autoLock(mLock);
+ return mPlaybackRate;
+}
+
+status_t MediaClock::getMediaTime(
+ int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const {
+ if (outMediaUs == NULL) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ return getMediaTime_l(realUs, outMediaUs, allowPastMaxTime);
+}
+
+status_t MediaClock::getMediaTime_l(
+ int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const {
+ if (mAnchorTimeRealUs == -1) {
+ return NO_INIT;
+ }
+
+ int64_t mediaUs = mAnchorTimeMediaUs
+ + (realUs - mAnchorTimeRealUs) * (double)mPlaybackRate;
+ if (mediaUs > mMaxTimeMediaUs && !allowPastMaxTime) {
+ mediaUs = mMaxTimeMediaUs;
+ }
+ if (mediaUs < mStartingTimeMediaUs) {
+ mediaUs = mStartingTimeMediaUs;
+ }
+ if (mediaUs < 0) {
+ mediaUs = 0;
+ }
+ *outMediaUs = mediaUs;
+ return OK;
+}
+
+status_t MediaClock::getRealTimeFor(
+ int64_t targetMediaUs, int64_t *outRealUs) const {
+ if (outRealUs == NULL) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ if (mPlaybackRate == 0.0) {
+ return NO_INIT;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t nowMediaUs;
+ status_t status =
+ getMediaTime_l(nowUs, &nowMediaUs, true /* allowPastMaxTime */);
+ if (status != OK) {
+ return status;
+ }
+ *outRealUs = (targetMediaUs - nowMediaUs) / (double)mPlaybackRate + nowUs;
+ return OK;
+}
+
+} // namespace android
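
The new MediaClock maps real time to media time through a single anchor:
media time = anchorMediaUs + (realUs - anchorRealUs) * playbackRate, and
getRealTimeFor() inverts that mapping. A small worked example of the same
arithmetic, standalone and outside the class:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t anchorMediaUs = 1000000;   // anchored at media time 1 s ...
        int64_t anchorRealUs  = 5000000;   // ... observed at real time 5 s
        double  rate          = 2.0;       // 2x playback

        int64_t nowRealUs  = 5250000;      // 250 ms of wall-clock time later
        int64_t nowMediaUs = anchorMediaUs
                + (int64_t)((nowRealUs - anchorRealUs) * rate);      // 1,500,000 us

        int64_t targetMediaUs = 2000000;   // when is media time 2 s presented?
        int64_t targetRealUs  = (int64_t)((targetMediaUs - nowMediaUs) / rate)
                + nowRealUs;                                         // 5,500,000 us

        printf("media now: %lld us, real time for 2 s of media: %lld us\n",
               (long long)nowMediaUs, (long long)targetRealUs);
        return 0;
    }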
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 6ca123a..40818f9 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -22,9 +22,13 @@
#include "include/SoftwareRenderer.h"
#include <binder/IBatteryStats.h>
+#include <binder/IMemory.h>
+#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
+#include <binder/MemoryDealer.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
+#include <media/IResourceManagerService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -36,6 +40,7 @@
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaFilter.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/NativeWindowWrapper.h>
#include <private/android_filesystem_config.h>
@@ -44,18 +49,72 @@
namespace android {
+static inline int getCallingPid() {
+ return IPCThreadState::self()->getCallingPid();
+}
+
+static int64_t getId(sp<IResourceManagerClient> client) {
+ return (int64_t) client.get();
+}
+
+static bool isResourceError(status_t err) {
+ return (err == OMX_ErrorInsufficientResources);
+}
+
+static const int kMaxRetry = 2;
+
+struct ResourceManagerClient : public BnResourceManagerClient {
+ ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {}
+
+ virtual bool reclaimResource() {
+ sp<MediaCodec> codec = mMediaCodec.promote();
+ if (codec == NULL) {
+ // codec is already gone.
+ return true;
+ }
+ status_t err = codec->release();
+ if (err != OK) {
+ ALOGW("ResourceManagerClient failed to release codec with err %d", err);
+ }
+ return (err == OK);
+ }
+
+protected:
+ virtual ~ResourceManagerClient() {}
+
+private:
+ wp<MediaCodec> mMediaCodec;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
+};
+
struct MediaCodec::BatteryNotifier : public Singleton<BatteryNotifier> {
BatteryNotifier();
+ virtual ~BatteryNotifier();
void noteStartVideo();
void noteStopVideo();
void noteStartAudio();
void noteStopAudio();
+ void onBatteryStatServiceDied();
private:
+ struct DeathNotifier : public IBinder::DeathRecipient {
+ DeathNotifier() {}
+ virtual void binderDied(const wp<IBinder>& /*who*/) {
+ BatteryNotifier::getInstance().onBatteryStatServiceDied();
+ }
+ };
+
+ Mutex mLock;
int32_t mVideoRefCount;
int32_t mAudioRefCount;
sp<IBatteryStats> mBatteryStatService;
+ sp<DeathNotifier> mDeathNotifier;
+
+ sp<IBatteryStats> getBatteryService_l();
+
+ DISALLOW_EVIL_CONSTRUCTORS(BatteryNotifier);
};
ANDROID_SINGLETON_STATIC_INSTANCE(MediaCodec::BatteryNotifier)
@@ -63,54 +122,162 @@ ANDROID_SINGLETON_STATIC_INSTANCE(MediaCodec::BatteryNotifier)
MediaCodec::BatteryNotifier::BatteryNotifier() :
mVideoRefCount(0),
mAudioRefCount(0) {
- // get battery service
+}
+
+sp<IBatteryStats> MediaCodec::BatteryNotifier::getBatteryService_l() {
+ if (mBatteryStatService != NULL) {
+ return mBatteryStatService;
+ }
+ // get battery service from service manager
const sp<IServiceManager> sm(defaultServiceManager());
if (sm != NULL) {
const String16 name("batterystats");
- mBatteryStatService = interface_cast<IBatteryStats>(sm->getService(name));
+ mBatteryStatService =
+ interface_cast<IBatteryStats>(sm->getService(name));
if (mBatteryStatService == NULL) {
ALOGE("batterystats service unavailable!");
+ return NULL;
+ }
+ mDeathNotifier = new DeathNotifier();
+ if (IInterface::asBinder(mBatteryStatService)->
+ linkToDeath(mDeathNotifier) != OK) {
+ mBatteryStatService.clear();
+ mDeathNotifier.clear();
+ return NULL;
+ }
+ // notify start now if media already started
+ if (mVideoRefCount > 0) {
+ mBatteryStatService->noteStartVideo(AID_MEDIA);
+ }
+ if (mAudioRefCount > 0) {
+ mBatteryStatService->noteStartAudio(AID_MEDIA);
}
}
+ return mBatteryStatService;
+}
+
+MediaCodec::BatteryNotifier::~BatteryNotifier() {
+ if (mDeathNotifier != NULL) {
+ IInterface::asBinder(mBatteryStatService)->
+ unlinkToDeath(mDeathNotifier);
+ }
}
void MediaCodec::BatteryNotifier::noteStartVideo() {
- if (mVideoRefCount == 0 && mBatteryStatService != NULL) {
- mBatteryStatService->noteStartVideo(AID_MEDIA);
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ if (mVideoRefCount == 0 && batteryService != NULL) {
+ batteryService->noteStartVideo(AID_MEDIA);
}
mVideoRefCount++;
}
void MediaCodec::BatteryNotifier::noteStopVideo() {
+ Mutex::Autolock _l(mLock);
if (mVideoRefCount == 0) {
ALOGW("BatteryNotifier::noteStop(): video refcount is broken!");
return;
}
mVideoRefCount--;
- if (mVideoRefCount == 0 && mBatteryStatService != NULL) {
- mBatteryStatService->noteStopVideo(AID_MEDIA);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ if (mVideoRefCount == 0 && batteryService != NULL) {
+ batteryService->noteStopVideo(AID_MEDIA);
}
}
void MediaCodec::BatteryNotifier::noteStartAudio() {
- if (mAudioRefCount == 0 && mBatteryStatService != NULL) {
- mBatteryStatService->noteStartAudio(AID_MEDIA);
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ if (mAudioRefCount == 0 && batteryService != NULL) {
+ batteryService->noteStartAudio(AID_MEDIA);
}
mAudioRefCount++;
}
void MediaCodec::BatteryNotifier::noteStopAudio() {
+ Mutex::Autolock _l(mLock);
if (mAudioRefCount == 0) {
ALOGW("BatteryNotifier::noteStop(): audio refcount is broken!");
return;
}
mAudioRefCount--;
- if (mAudioRefCount == 0 && mBatteryStatService != NULL) {
- mBatteryStatService->noteStopAudio(AID_MEDIA);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ if (mAudioRefCount == 0 && batteryService != NULL) {
+ batteryService->noteStopAudio(AID_MEDIA);
+ }
+}
+
+void MediaCodec::BatteryNotifier::onBatteryStatServiceDied() {
+ Mutex::Autolock _l(mLock);
+ mBatteryStatService.clear();
+ mDeathNotifier.clear();
+ // Do not reset mVideoRefCount and mAudioRefCount here. The ref
+ // counting is independent of the battery service availability.
+ // We need this if battery service becomes available after media
+ // started.
+}
+
+MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy() {
+}
+
+MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
+ if (mService != NULL) {
+ IInterface::asBinder(mService)->unlinkToDeath(this);
+ }
+}
+
+void MediaCodec::ResourceManagerServiceProxy::init() {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.resource_manager"));
+ mService = interface_cast<IResourceManagerService>(binder);
+ if (mService == NULL) {
+ ALOGE("Failed to get ResourceManagerService");
+ return;
+ }
+ if (IInterface::asBinder(mService)->linkToDeath(this) != OK) {
+ mService.clear();
+ ALOGE("Failed to linkToDeath to ResourceManagerService.");
+ return;
+ }
+}
+
+void MediaCodec::ResourceManagerServiceProxy::binderDied(const wp<IBinder>& /*who*/) {
+ ALOGW("ResourceManagerService died.");
+ Mutex::Autolock _l(mLock);
+ mService.clear();
+}
+
+void MediaCodec::ResourceManagerServiceProxy::addResource(
+ int pid,
+ int64_t clientId,
+ const sp<IResourceManagerClient> client,
+ const Vector<MediaResource> &resources) {
+ Mutex::Autolock _l(mLock);
+ if (mService == NULL) {
+ return;
+ }
+ mService->addResource(pid, clientId, client, resources);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::removeResource(int64_t clientId) {
+ Mutex::Autolock _l(mLock);
+ if (mService == NULL) {
+ return;
+ }
+ mService->removeResource(clientId);
+}
+
+bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
+ int callingPid, const Vector<MediaResource> &resources) {
+ Mutex::Autolock _l(mLock);
+ if (mService == NULL) {
+ return false;
}
+ return mService->reclaimResource(callingPid, resources);
}
+
// static
sp<MediaCodec> MediaCodec::CreateByType(
const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err) {
@@ -141,10 +308,14 @@ MediaCodec::MediaCodec(const sp<ALooper> &looper)
mCodec(NULL),
mReplyID(0),
mFlags(0),
+ mResourceManagerClient(new ResourceManagerClient(this)),
+ mResourceManagerService(new ResourceManagerServiceProxy()),
mStickyError(OK),
mSoftRenderer(NULL),
mBatteryStatNotified(false),
mIsVideo(false),
+ mVideoWidth(0),
+ mVideoHeight(0),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
@@ -154,6 +325,7 @@ MediaCodec::MediaCodec(const sp<ALooper> &looper)
MediaCodec::~MediaCodec() {
CHECK_EQ(mState, UNINITIALIZED);
+ mResourceManagerService->removeResource(getId(mResourceManagerClient));
}
// static
@@ -173,13 +345,15 @@ status_t MediaCodec::PostAndAwaitResponse(
}
// static
-void MediaCodec::PostReplyWithError(int32_t replyID, int32_t err) {
+void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
response->postReply(replyID);
}
status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
+ mResourceManagerService->init();
+
// save init parameters for reset
mInitName = name;
mInitNameIsType = nameIsType;
@@ -189,13 +363,23 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
// quickly, violating the OpenMAX specs, until that is remedied
// we need to invest in an extra looper to free the main event
// queue.
- mCodec = new ACodec;
- bool needDedicatedLooper = false;
+
+ if (nameIsType || !strncasecmp(name.c_str(), "omx.", 4)) {
+ mCodec = new ACodec;
+ } else if (!nameIsType
+ && !strncasecmp(name.c_str(), "android.filter.", 15)) {
+ mCodec = new MediaFilter;
+ } else {
+ return NAME_NOT_FOUND;
+ }
+
+ bool secureCodec = false;
if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) {
- needDedicatedLooper = true;
+ mIsVideo = true;
} else {
AString tmp = name;
if (tmp.endsWith(".secure")) {
+ secureCodec = true;
tmp.erase(tmp.size() - 7, 7);
}
const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
@@ -206,14 +390,15 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
info->getSupportedMimes(&mimes);
for (size_t i = 0; i < mimes.size(); i++) {
if (mimes[i].startsWith("video/")) {
- needDedicatedLooper = true;
+ mIsVideo = true;
break;
}
}
}
}
- if (needDedicatedLooper) {
+ if (mIsVideo) {
+ // video codec needs dedicated looper
if (mCodecLooper == NULL) {
mCodecLooper = new ALooper;
mCodecLooper->setName("CodecLooper");
@@ -227,9 +412,9 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
mLooper->registerHandler(this);
- mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, id()));
+ mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, this));
- sp<AMessage> msg = new AMessage(kWhatInit, id());
+ sp<AMessage> msg = new AMessage(kWhatInit, this);
msg->setString("name", name);
msg->setInt32("nameIsType", nameIsType);
@@ -237,12 +422,29 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
msg->setInt32("encoder", encoder);
}
- sp<AMessage> response;
- return PostAndAwaitResponse(msg, &response);
+ status_t err;
+ Vector<MediaResource> resources;
+ const char *type = secureCodec ? kResourceSecureCodec : kResourceNonSecureCodec;
+ resources.push_back(MediaResource(String8(type), 1));
+ for (int i = 0; i <= kMaxRetry; ++i) {
+ if (i > 0) {
+ // Don't try to reclaim resources on the first attempt.
+ if (!mResourceManagerService->reclaimResource(getCallingPid(), resources)) {
+ break;
+ }
+ }
+
+ sp<AMessage> response;
+ err = PostAndAwaitResponse(msg, &response);
+ if (!isResourceError(err)) {
+ break;
+ }
+ }
+ return err;
}
status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
- sp<AMessage> msg = new AMessage(kWhatSetCallback, id());
+ sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
msg->setMessage("callback", callback);
sp<AMessage> response;
@@ -254,7 +456,12 @@ status_t MediaCodec::configure(
const sp<Surface> &nativeWindow,
const sp<ICrypto> &crypto,
uint32_t flags) {
- sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+ sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+
+ if (mIsVideo) {
+ format->findInt32("width", &mVideoWidth);
+ format->findInt32("height", &mVideoHeight);
+ }
msg->setMessage("format", format);
msg->setInt32("flags", flags);
@@ -269,26 +476,47 @@ status_t MediaCodec::configure(
msg->setPointer("crypto", crypto.get());
}
- sp<AMessage> response;
- status_t err = PostAndAwaitResponse(msg, &response);
+ // save msg for reset
+ mConfigureMsg = msg;
- if (err != OK && err != INVALID_OPERATION) {
- // MediaCodec now set state to UNINITIALIZED upon any fatal error.
- // To maintain backward-compatibility, do a reset() to put codec
- // back into INITIALIZED state.
- // But don't reset if the err is INVALID_OPERATION, which means
- // the configure failure is due to wrong state.
+ status_t err;
+ Vector<MediaResource> resources;
+ const char *type = (mFlags & kFlagIsSecure) ?
+ kResourceSecureCodec : kResourceNonSecureCodec;
+ resources.push_back(MediaResource(String8(type), 1));
+ // Don't know the buffer size at this point, but it's fine to use 1 because
+ // the reclaimResource call doesn't consider the requester's buffer size for now.
+ resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
+ for (int i = 0; i <= kMaxRetry; ++i) {
+ if (i > 0) {
+ // Don't try to reclaim resources on the first attempt.
+ if (!mResourceManagerService->reclaimResource(getCallingPid(), resources)) {
+ break;
+ }
+ }
- ALOGE("configure failed with err 0x%08x, resetting...", err);
- reset();
+ sp<AMessage> response;
+ err = PostAndAwaitResponse(msg, &response);
+ if (err != OK && err != INVALID_OPERATION) {
+ // MediaCodec now sets its state to UNINITIALIZED upon any fatal error.
+ // To maintain backward-compatibility, do a reset() to put codec
+ // back into INITIALIZED state.
+ // But don't reset if the err is INVALID_OPERATION, which means
+ // the configure failure is due to wrong state.
+
+ ALOGE("configure failed with err 0x%08x, resetting...", err);
+ reset();
+ }
+ if (!isResourceError(err)) {
+ break;
+ }
}
-
return err;
}
status_t MediaCodec::createInputSurface(
sp<IGraphicBufferProducer>* bufferProducer) {
- sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, id());
+ sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
sp<AMessage> response;
status_t err = PostAndAwaitResponse(msg, &response);
@@ -306,22 +534,76 @@ status_t MediaCodec::createInputSurface(
return err;
}
+uint64_t MediaCodec::getGraphicBufferSize() {
+ if (!mIsVideo) {
+ return 0;
+ }
+
+ uint64_t size = 0;
+ size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
+ for (size_t i = 0; i < portNum; ++i) {
+ // TODO: this is just an estimation, we should get the real buffer size from ACodec.
+ size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2;
+ }
+ return size;
+}
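For a rough sense of scale of the YUV420 estimate above (illustrative numbers only, not taken from this change): a 1280x720 stream needs 1280 * 720 * 3 / 2 = 1,382,400 bytes per buffer, so with, say, four buffers on each port the codec would report about 8 * 1.38 MB, roughly 11 MB, of graphic memory.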
+
+void MediaCodec::addResource(const char *type, uint64_t value) {
+ Vector<MediaResource> resources;
+ resources.push_back(MediaResource(String8(type), value));
+ mResourceManagerService->addResource(
+ getCallingPid(), getId(mResourceManagerClient), mResourceManagerClient, resources);
+}
+
status_t MediaCodec::start() {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
+ sp<AMessage> msg = new AMessage(kWhatStart, this);
- sp<AMessage> response;
- return PostAndAwaitResponse(msg, &response);
+ status_t err;
+ Vector<MediaResource> resources;
+ const char *type = (mFlags & kFlagIsSecure) ?
+ kResourceSecureCodec : kResourceNonSecureCodec;
+ resources.push_back(MediaResource(String8(type), 1));
+ // Don't know the buffer size at this point, but it's fine to use 1 because
+ // the reclaimResource call doesn't consider the requester's buffer size for now.
+ resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
+ for (int i = 0; i <= kMaxRetry; ++i) {
+ if (i > 0) {
+ // Don't try to reclaim resources on the first attempt.
+ if (!mResourceManagerService->reclaimResource(getCallingPid(), resources)) {
+ break;
+ }
+ // Recover codec from previous error before retry start.
+ err = reset();
+ if (err != OK) {
+ ALOGE("retrying start: failed to reset codec");
+ break;
+ }
+ sp<AMessage> response;
+ err = PostAndAwaitResponse(mConfigureMsg, &response);
+ if (err != OK) {
+ ALOGE("retrying start: failed to configure codec");
+ break;
+ }
+ }
+
+ sp<AMessage> response;
+ err = PostAndAwaitResponse(msg, &response);
+ if (!isResourceError(err)) {
+ break;
+ }
+ }
+ return err;
}
status_t MediaCodec::stop() {
- sp<AMessage> msg = new AMessage(kWhatStop, id());
+ sp<AMessage> msg = new AMessage(kWhatStop, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
status_t MediaCodec::release() {
- sp<AMessage> msg = new AMessage(kWhatRelease, id());
+ sp<AMessage> msg = new AMessage(kWhatRelease, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
@@ -373,7 +655,7 @@ status_t MediaCodec::queueInputBuffer(
errorDetailMsg->clear();
}
- sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
msg->setSize("index", index);
msg->setSize("offset", offset);
msg->setSize("size", size);
@@ -400,7 +682,7 @@ status_t MediaCodec::queueSecureInputBuffer(
errorDetailMsg->clear();
}
- sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
msg->setSize("index", index);
msg->setSize("offset", offset);
msg->setPointer("subSamples", (void *)subSamples);
@@ -419,7 +701,7 @@ status_t MediaCodec::queueSecureInputBuffer(
}
status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
- sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
msg->setInt64("timeoutUs", timeoutUs);
sp<AMessage> response;
@@ -440,7 +722,7 @@ status_t MediaCodec::dequeueOutputBuffer(
int64_t *presentationTimeUs,
uint32_t *flags,
int64_t timeoutUs) {
- sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
msg->setInt64("timeoutUs", timeoutUs);
sp<AMessage> response;
@@ -459,7 +741,7 @@ status_t MediaCodec::dequeueOutputBuffer(
}
status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
- sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
msg->setSize("index", index);
msg->setInt32("render", true);
@@ -468,7 +750,7 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
}
status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
- sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
msg->setSize("index", index);
msg->setInt32("render", true);
msg->setInt64("timestampNs", timestampNs);
@@ -478,7 +760,7 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestam
}
status_t MediaCodec::releaseOutputBuffer(size_t index) {
- sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
msg->setSize("index", index);
sp<AMessage> response;
@@ -486,14 +768,14 @@ status_t MediaCodec::releaseOutputBuffer(size_t index) {
}
status_t MediaCodec::signalEndOfInputStream() {
- sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, id());
+ sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
- sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id());
+ sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
sp<AMessage> response;
status_t err;
@@ -507,7 +789,7 @@ status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
}
status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
- sp<AMessage> msg = new AMessage(kWhatGetInputFormat, id());
+ sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
sp<AMessage> response;
status_t err;
@@ -521,7 +803,7 @@ status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
}
status_t MediaCodec::getName(AString *name) const {
- sp<AMessage> msg = new AMessage(kWhatGetName, id());
+ sp<AMessage> msg = new AMessage(kWhatGetName, this);
sp<AMessage> response;
status_t err;
@@ -534,8 +816,18 @@ status_t MediaCodec::getName(AString *name) const {
return OK;
}
+status_t MediaCodec::getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const {
+ sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
+ msg->setInt32("portIndex", kPortIndexInput);
+ msg->setPointer("buffers", buffers);
+ msg->setInt32("widevine", true);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
- sp<AMessage> msg = new AMessage(kWhatGetBuffers, id());
+ sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
msg->setInt32("portIndex", kPortIndexInput);
msg->setPointer("buffers", buffers);
@@ -544,7 +836,7 @@ status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
}
status_t MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const {
- sp<AMessage> msg = new AMessage(kWhatGetBuffers, id());
+ sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
msg->setInt32("portIndex", kPortIndexOutput);
msg->setPointer("buffers", buffers);
@@ -602,20 +894,20 @@ status_t MediaCodec::getBufferAndFormat(
}
status_t MediaCodec::flush() {
- sp<AMessage> msg = new AMessage(kWhatFlush, id());
+ sp<AMessage> msg = new AMessage(kWhatFlush, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
status_t MediaCodec::requestIDRFrame() {
- (new AMessage(kWhatRequestIDRFrame, id()))->post();
+ (new AMessage(kWhatRequestIDRFrame, this))->post();
return OK;
}
void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
- sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, id());
+ sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
msg->setMessage("notify", notify);
msg->post();
}
@@ -640,7 +932,7 @@ void MediaCodec::cancelPendingDequeueOperations() {
}
}
-bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
+bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
if (!isExecuting() || (mFlags & kFlagIsAsync)
|| (newRequest && (mFlags & kFlagDequeueInputPending))) {
PostReplyWithError(replyID, INVALID_OPERATION);
@@ -664,7 +956,7 @@ bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
return true;
}
-bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) {
+bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
sp<AMessage> response = new AMessage;
if (!isExecuting() || (mFlags & kFlagIsAsync)
@@ -874,11 +1166,15 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
mFlags &= ~kFlagUsesSoftwareRenderer;
}
+ String8 resourceType;
if (mComponentName.endsWith(".secure")) {
mFlags |= kFlagIsSecure;
+ resourceType = String8(kResourceSecureCodec);
} else {
mFlags &= ~kFlagIsSecure;
+ resourceType = String8(kResourceNonSecureCodec);
}
+ addResource(resourceType, 1);
(new AMessage)->postReply(mReplyID);
break;
@@ -959,6 +1255,17 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
size_t numBuffers = portDesc->countBuffers();
+ size_t totalSize = 0;
+ for (size_t i = 0; i < numBuffers; ++i) {
+ if (portIndex == kPortIndexInput && mCrypto != NULL) {
+ totalSize += portDesc->bufferAt(i)->capacity();
+ }
+ }
+
+ if (totalSize) {
+ mDealer = new MemoryDealer(totalSize, "MediaCodec");
+ }
+
for (size_t i = 0; i < numBuffers; ++i) {
BufferInfo info;
info.mBufferID = portDesc->bufferIDAt(i);
@@ -966,8 +1273,10 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
info.mData = portDesc->bufferAt(i);
if (portIndex == kPortIndexInput && mCrypto != NULL) {
+ sp<IMemory> mem = mDealer->allocate(info.mData->capacity());
info.mEncryptedData =
- new ABuffer(info.mData->capacity());
+ new ABuffer(mem->pointer(), info.mData->capacity());
+ info.mSharedEncryptedBuffer = mem;
}
buffers->push_back(info);
@@ -978,6 +1287,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// We're always allocating output buffers after
// allocating input buffers, so this is a good
// indication that now all buffers are allocated.
+ if (mIsVideo) {
+ addResource(kResourceGraphicMemory, getGraphicBufferSize());
+ }
setState(STARTED);
(new AMessage)->postReply(mReplyID);
} else {
@@ -1157,6 +1469,8 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
mFlags &= ~kFlagIsComponentAllocated;
+ mResourceManagerService->removeResource(getId(mResourceManagerClient));
+
(new AMessage)->postReply(mReplyID);
break;
}
@@ -1188,7 +1502,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatInit:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState != UNINITIALIZED) {
@@ -1224,7 +1538,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSetCallback:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState == UNINITIALIZED
@@ -1256,7 +1570,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatConfigure:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState != INITIALIZED) {
@@ -1313,7 +1627,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatCreateInputSurface:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
// Must be configured, but can't have been started yet.
@@ -1329,7 +1643,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatStart:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState == FLUSHED) {
@@ -1355,7 +1669,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
State targetState =
(msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
@@ -1403,7 +1717,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDequeueInputBuffer:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mFlags & kFlagIsAsync) {
@@ -1435,7 +1749,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
if (timeoutUs > 0ll) {
sp<AMessage> timeoutMsg =
- new AMessage(kWhatDequeueInputTimedOut, id());
+ new AMessage(kWhatDequeueInputTimedOut, this);
timeoutMsg->setInt32(
"generation", ++mDequeueInputTimeoutGeneration);
timeoutMsg->post(timeoutUs);
@@ -1464,7 +1778,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatQueueInputBuffer:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
@@ -1483,7 +1797,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDequeueOutputBuffer:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mFlags & kFlagIsAsync) {
@@ -1509,7 +1823,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
if (timeoutUs > 0ll) {
sp<AMessage> timeoutMsg =
- new AMessage(kWhatDequeueOutputTimedOut, id());
+ new AMessage(kWhatDequeueOutputTimedOut, this);
timeoutMsg->setInt32(
"generation", ++mDequeueOutputTimeoutGeneration);
timeoutMsg->post(timeoutUs);
@@ -1538,7 +1852,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatReleaseOutputBuffer:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
@@ -1557,7 +1871,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSignalEndOfInputStream:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
@@ -1575,10 +1889,14 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatGetBuffers:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ // Unfortunately widevine legacy source requires knowing all of the
+ // codec input buffers, so we have to provide them even in async mode.
+ int32_t widevine = 0;
+ msg->findInt32("widevine", &widevine);
- if (!isExecuting() || (mFlags & kFlagIsAsync)) {
+ if (!isExecuting() || ((mFlags & kFlagIsAsync) && !widevine)) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -1609,7 +1927,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatFlush:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
@@ -1635,7 +1953,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
sp<AMessage> format =
(msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if ((mState != CONFIGURED && mState != STARTING &&
@@ -1672,7 +1990,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatGetName:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mComponentName.empty()) {
@@ -1688,7 +2006,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSetParameters:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
sp<AMessage> params;
@@ -1742,7 +2060,7 @@ status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
AString errorDetailMsg;
- sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
msg->setSize("index", bufferIndex);
msg->setSize("offset", 0);
msg->setSize("size", csd->size());
@@ -1943,7 +2261,8 @@ status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
key,
iv,
mode,
- info->mEncryptedData->base() + offset,
+ info->mSharedEncryptedBuffer,
+ offset,
subSamples,
numSubSamples,
info->mData->base(),
@@ -2197,7 +2516,7 @@ void MediaCodec::postActivityNotificationIfPossible() {
}
status_t MediaCodec::setParameters(const sp<AMessage> &params) {
- sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
msg->setMessage("params", params);
sp<AMessage> response;
@@ -2253,12 +2572,6 @@ status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
void MediaCodec::updateBatteryStat() {
if (mState == CONFIGURED && !mBatteryStatNotified) {
- AString mime;
- CHECK(mOutputFormat != NULL &&
- mOutputFormat->findString("mime", &mime));
-
- mIsVideo = mime.startsWithIgnoreCase("video/");
-
BatteryNotifier& notifier(BatteryNotifier::getInstance());
if (mIsVideo) {
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index cf6e937..26798ae 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "MediaCodecList"
#include <utils/Log.h>
+#include "MediaCodecListOverrides.h"
+
#include <binder/IServiceManager.h>
#include <media/IMediaCodecList.h>
@@ -31,6 +33,7 @@
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
+#include <sys/stat.h>
#include <utils/threads.h>
#include <libexpat/expat.h>
@@ -41,21 +44,58 @@ static Mutex sInitMutex;
static MediaCodecList *gCodecList = NULL;
+static const char *kProfilingResults = "/data/misc/media/media_codecs_profiling_results.xml";
+
+static bool parseBoolean(const char *s) {
+ if (!strcasecmp(s, "true") || !strcasecmp(s, "yes") || !strcasecmp(s, "y")) {
+ return true;
+ }
+ char *end;
+ unsigned long res = strtoul(s, &end, 10);
+ return *s != '\0' && *end == '\0' && res > 0;
+}
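A quick illustration of the parsing rules above: parseBoolean("yes") and parseBoolean("2") both return true, while parseBoolean("0") and parseBoolean("") return false, because the numeric fallback requires a fully parsed value greater than zero.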
+
// static
sp<IMediaCodecList> MediaCodecList::sCodecList;
// static
sp<IMediaCodecList> MediaCodecList::getLocalInstance() {
- Mutex::Autolock autoLock(sInitMutex);
-
- if (gCodecList == NULL) {
- gCodecList = new MediaCodecList;
- if (gCodecList->initCheck() == OK) {
- sCodecList = gCodecList;
+ bool profilingNeeded = false;
+ KeyedVector<AString, CodecSettings> updates;
+ Vector<sp<MediaCodecInfo>> infos;
+
+ {
+ Mutex::Autolock autoLock(sInitMutex);
+
+ if (gCodecList == NULL) {
+ gCodecList = new MediaCodecList;
+ if (gCodecList->initCheck() == OK) {
+ sCodecList = gCodecList;
+
+ struct stat s;
+ if (stat(kProfilingResults, &s) == -1) {
+ // profiling results don't exist yet
+ profilingNeeded = true;
+ for (size_t i = 0; i < gCodecList->countCodecs(); ++i) {
+ infos.push_back(gCodecList->getCodecInfo(i));
+ }
+ }
+ }
}
}
- return sCodecList;
+ if (profilingNeeded) {
+ profileCodecs(infos, &updates);
+ }
+
+ {
+ Mutex::Autolock autoLock(sInitMutex);
+ if (updates.size() > 0) {
+ gCodecList->updateDetailsForMultipleCodecs(updates);
+ }
+
+ return sCodecList;
+ }
}
static Mutex sRemoteInitMutex;
@@ -94,11 +134,27 @@ sp<IMediaCodecList> MediaCodecList::getInstance() {
}
MediaCodecList::MediaCodecList()
- : mInitCheck(NO_INIT) {
+ : mInitCheck(NO_INIT),
+ mUpdate(false),
+ mGlobalSettings(new AMessage()) {
parseTopLevelXMLFile("/etc/media_codecs.xml");
+ parseTopLevelXMLFile(kProfilingResults, true/* ignore_errors */);
+}
+
+void MediaCodecList::updateDetailsForMultipleCodecs(
+ const KeyedVector<AString, CodecSettings>& updates) {
+ if (updates.size() == 0) {
+ return;
+ }
+
+ exportResultsToXML(kProfilingResults, updates);
+
+ for (size_t i = 0; i < updates.size(); ++i) {
+ applyCodecSettings(updates.keyAt(i), updates.valueAt(i), &mCodecInfos);
+ }
}
-void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {
+void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml, bool ignore_errors) {
// get href_base
char *href_base_end = strrchr(codecs_xml, '/');
if (href_base_end != NULL) {
@@ -119,13 +175,16 @@ void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {
mOMX.clear();
if (mInitCheck != OK) {
+ if (ignore_errors) {
+ mInitCheck = OK;
+ return;
+ }
mCodecInfos.clear();
return;
}
for (size_t i = mCodecInfos.size(); i-- > 0;) {
const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();
-
if (info.mCaps.size() == 0) {
// No types supported by this component???
ALOGW("Component %s does not support any type of media?",
@@ -169,6 +228,16 @@ void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {
}
ALOGV(" levels=[%s]", nice.c_str());
}
+ {
+ AString quirks;
+ for (size_t ix = 0; ix < info.mQuirks.size(); ix++) {
+ if (ix > 0) {
+ quirks.append(", ");
+ }
+ quirks.append(info.mQuirks[ix]);
+ }
+ ALOGV(" quirks=[%s]", quirks.c_str());
+ }
}
#endif
}
@@ -328,6 +397,16 @@ void MediaCodecList::startElementHandler(
mCurrentSection = SECTION_DECODERS;
} else if (!strcmp(name, "Encoders")) {
mCurrentSection = SECTION_ENCODERS;
+ } else if (!strcmp(name, "Settings")) {
+ mCurrentSection = SECTION_SETTINGS;
+ }
+ break;
+ }
+
+ case SECTION_SETTINGS:
+ {
+ if (!strcmp(name, "Setting")) {
+ mInitCheck = addSettingFromAttributes(attrs);
}
break;
}
@@ -397,6 +476,14 @@ void MediaCodecList::endElementHandler(const char *name) {
}
switch (mCurrentSection) {
+ case SECTION_SETTINGS:
+ {
+ if (!strcmp(name, "Settings")) {
+ mCurrentSection = SECTION_TOPLEVEL;
+ }
+ break;
+ }
+
case SECTION_DECODERS:
{
if (!strcmp(name, "Decoders")) {
@@ -462,10 +549,10 @@ void MediaCodecList::endElementHandler(const char *name) {
--mDepth;
}
-status_t MediaCodecList::addMediaCodecFromAttributes(
- bool encoder, const char **attrs) {
+status_t MediaCodecList::addSettingFromAttributes(const char **attrs) {
const char *name = NULL;
- const char *type = NULL;
+ const char *value = NULL;
+ const char *update = NULL;
size_t i = 0;
while (attrs[i] != NULL) {
@@ -475,11 +562,17 @@ status_t MediaCodecList::addMediaCodecFromAttributes(
}
name = attrs[i + 1];
++i;
- } else if (!strcmp(attrs[i], "type")) {
+ } else if (!strcmp(attrs[i], "value")) {
if (attrs[i + 1] == NULL) {
return -EINVAL;
}
- type = attrs[i + 1];
+ value = attrs[i + 1];
+ ++i;
+ } else if (!strcmp(attrs[i], "update")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ update = attrs[i + 1];
++i;
} else {
return -EINVAL;
@@ -488,10 +581,34 @@ status_t MediaCodecList::addMediaCodecFromAttributes(
++i;
}
- if (name == NULL) {
+ if (name == NULL || value == NULL) {
return -EINVAL;
}
+ mUpdate = (update != NULL) && parseBoolean(update);
+ if (mUpdate != mGlobalSettings->contains(name)) {
+ return -EINVAL;
+ }
+
+ mGlobalSettings->setString(name, value);
+ return OK;
+}
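A minimal sketch of the <Settings> section this handler accepts, assuming the usual media_codecs.xml layout; the setting name and value here are hypothetical placeholders, only the name/value/update attributes come from the parser above:

    <MediaCodecs>
        <Settings>
            <Setting name="example-global-setting" value="true" />
        </Settings>
        ...
    </MediaCodecs>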
+
+void MediaCodecList::setCurrentCodecInfo(bool encoder, const char *name, const char *type) {
+ for (size_t i = 0; i < mCodecInfos.size(); ++i) {
+ if (AString(name) == mCodecInfos[i]->getCodecName()) {
+ if (mCodecInfos[i]->getCapabilitiesFor(type) == NULL) {
+ ALOGW("Overrides with an unexpected mime %s", type);
+ // Create a new MediaCodecInfo (but don't add it to mCodecInfos) to hold the
+ // overrides we don't want.
+ mCurrentInfo = new MediaCodecInfo(name, encoder, type);
+ } else {
+ mCurrentInfo = mCodecInfos.editItemAt(i);
+ mCurrentInfo->updateMime(type); // to set the current cap
+ }
+ return;
+ }
+ }
mCurrentInfo = new MediaCodecInfo(name, encoder, type);
// The next step involves trying to load the codec, which may
// fail. Only list the codec if this succeeds.
@@ -500,6 +617,78 @@ status_t MediaCodecList::addMediaCodecFromAttributes(
if (initializeCapabilities(type) == OK) {
mCodecInfos.push_back(mCurrentInfo);
}
+}
+
+status_t MediaCodecList::addMediaCodecFromAttributes(
+ bool encoder, const char **attrs) {
+ const char *name = NULL;
+ const char *type = NULL;
+ const char *update = NULL;
+
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (!strcmp(attrs[i], "name")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ name = attrs[i + 1];
+ ++i;
+ } else if (!strcmp(attrs[i], "type")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ type = attrs[i + 1];
+ ++i;
+ } else if (!strcmp(attrs[i], "update")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ update = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+
+ ++i;
+ }
+
+ if (name == NULL) {
+ return -EINVAL;
+ }
+
+ mUpdate = (update != NULL) && parseBoolean(update);
+ ssize_t index = -1;
+ for (size_t i = 0; i < mCodecInfos.size(); ++i) {
+ if (AString(name) == mCodecInfos[i]->getCodecName()) {
+ index = i;
+ }
+ }
+ if (mUpdate != (index >= 0)) {
+ return -EINVAL;
+ }
+
+ if (index >= 0) {
+ // existing codec
+ mCurrentInfo = mCodecInfos.editItemAt(index);
+ if (type != NULL) {
+ // existing type
+ if (mCodecInfos[index]->getCapabilitiesFor(type) == NULL) {
+ return -EINVAL;
+ }
+ mCurrentInfo->updateMime(type);
+ }
+ } else {
+ // new codec
+ mCurrentInfo = new MediaCodecInfo(name, encoder, type);
+ // The next step involves trying to load the codec, which may
+ // fail. Only list the codec if this succeeds.
+ // However, keep mCurrentInfo object around until parsing
+ // of full codec info is completed.
+ if (initializeCapabilities(type) == OK) {
+ mCodecInfos.push_back(mCurrentInfo);
+ }
+ }
+
return OK;
}
@@ -553,6 +742,7 @@ status_t MediaCodecList::addQuirk(const char **attrs) {
status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {
const char *name = NULL;
+ const char *update = NULL;
size_t i = 0;
while (attrs[i] != NULL) {
@@ -562,6 +752,12 @@ status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {
}
name = attrs[i + 1];
++i;
+ } else if (!strcmp(attrs[i], "update")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ update = attrs[i + 1];
+ ++i;
} else {
return -EINVAL;
}
@@ -573,14 +769,25 @@ status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {
return -EINVAL;
}
- status_t ret = mCurrentInfo->addMime(name);
+ bool isExistingType = (mCurrentInfo->getCapabilitiesFor(name) != NULL);
+ if (mUpdate != isExistingType) {
+ return -EINVAL;
+ }
+
+ status_t ret;
+ if (mUpdate) {
+ ret = mCurrentInfo->updateMime(name);
+ } else {
+ ret = mCurrentInfo->addMime(name);
+ }
+
if (ret != OK) {
return ret;
}
// The next step involves trying to load the codec, which may
// fail. Handle this gracefully (by not reporting such mime).
- if (initializeCapabilities(name) != OK) {
+ if (!mUpdate && initializeCapabilities(name) != OK) {
mCurrentInfo->removeMime(name);
}
return OK;
@@ -758,7 +965,8 @@ status_t MediaCodecList::addLimit(const char **attrs) {
return limitFoundMissingAttr(name, "ranges", found);
} else if (msg->contains("scale")) {
return limitFoundMissingAttr(name, "scale");
- } else if ((name == "alignment" || name == "block-size") ^
+ } else if ((name == "alignment" || name == "block-size"
+ || name == "max-supported-instances") ^
(found = msg->findString("value", &value))) {
return limitFoundMissingAttr(name, "value", found);
}
@@ -780,15 +988,6 @@ status_t MediaCodecList::addLimit(const char **attrs) {
return OK;
}
-static bool parseBoolean(const char *s) {
- if (!strcasecmp(s, "true") || !strcasecmp(s, "yes") || !strcasecmp(s, "y")) {
- return true;
- }
- char *end;
- unsigned long res = strtoul(s, &end, 10);
- return *s != '\0' && *end == '\0' && res > 0;
-}
-
status_t MediaCodecList::addFeature(const char **attrs) {
size_t i = 0;
const char *name = NULL;
@@ -860,4 +1059,8 @@ size_t MediaCodecList::countCodecs() const {
return mCodecInfos.size();
}
+const sp<AMessage> MediaCodecList::getGlobalSettings() const {
+ return mGlobalSettings;
+}
+
} // namespace android
diff --git a/media/libstagefright/MediaCodecListOverrides.cpp b/media/libstagefright/MediaCodecListOverrides.cpp
new file mode 100644
index 0000000..3c54f34
--- /dev/null
+++ b/media/libstagefright/MediaCodecListOverrides.cpp
@@ -0,0 +1,404 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecListOverrides"
+#include <utils/Log.h>
+
+#include "MediaCodecListOverrides.h"
+
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/IMediaCodecList.h>
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodec.h>
+
+namespace android {
+
+// a limit to avoid allocating an unreasonable number of codec instances during measurement.
+// this should be in sync with the MAX_SUPPORTED_INSTANCES defined in MediaCodecInfo.java.
+static const int kMaxInstances = 32;
+
+// TODO: move MediaCodecInfo to C++. Until then, some temp methods to parse out info.
+static bool getMeasureSize(sp<MediaCodecInfo::Capabilities> caps, int32_t *width, int32_t *height) {
+ AString sizeRange;
+ if (!caps->getDetails()->findString("size-range", &sizeRange)) {
+ return false;
+ }
+ AString minSize;
+ AString maxSize;
+ if (!splitString(sizeRange, "-", &minSize, &maxSize)) {
+ return false;
+ }
+ AString sWidth;
+ AString sHeight;
+ if (!splitString(minSize, "x", &sWidth, &sHeight)) {
+ if (!splitString(minSize, "*", &sWidth, &sHeight)) {
+ return false;
+ }
+ }
+
+ *width = strtol(sWidth.c_str(), NULL, 10);
+ *height = strtol(sHeight.c_str(), NULL, 10);
+ return (*width > 0) && (*height > 0);
+}
+
+static void getMeasureBitrate(sp<MediaCodecInfo::Capabilities> caps, int32_t *bitrate) {
+ // Until we have a native MediaCodecInfo, we cannot get bitrates based on profile/levels.
+ // We use 200000 as default value for our measurement.
+ *bitrate = 200000;
+ AString bitrateRange;
+ if (!caps->getDetails()->findString("bitrate-range", &bitrateRange)) {
+ return;
+ }
+ AString minBitrate;
+ AString maxBitrate;
+ if (!splitString(bitrateRange, "-", &minBitrate, &maxBitrate)) {
+ return;
+ }
+
+ *bitrate = strtol(minBitrate.c_str(), NULL, 10);
+}
+
+static sp<AMessage> getMeasureFormat(
+ bool isEncoder, AString mime, sp<MediaCodecInfo::Capabilities> caps) {
+ sp<AMessage> format = new AMessage();
+ format->setString("mime", mime);
+
+ if (isEncoder) {
+ int32_t bitrate = 0;
+ getMeasureBitrate(caps, &bitrate);
+ format->setInt32("bitrate", bitrate);
+ }
+
+ if (mime.startsWith("video/")) {
+ int32_t width = 0;
+ int32_t height = 0;
+ if (!getMeasureSize(caps, &width, &height)) {
+ return NULL;
+ }
+ format->setInt32("width", width);
+ format->setInt32("height", height);
+
+ Vector<uint32_t> colorFormats;
+ caps->getSupportedColorFormats(&colorFormats);
+ if (colorFormats.size() == 0) {
+ return NULL;
+ }
+ format->setInt32("color-format", colorFormats[0]);
+
+ format->setFloat("frame-rate", 10.0);
+ format->setInt32("i-frame-interval", 10);
+ } else {
+ // TODO: profile hw audio
+ return NULL;
+ }
+
+ return format;
+}
+
+static size_t doProfileCodecs(
+ bool isEncoder, AString name, AString mime, sp<MediaCodecInfo::Capabilities> caps) {
+ sp<AMessage> format = getMeasureFormat(isEncoder, mime, caps);
+ if (format == NULL) {
+ return 0;
+ }
+ if (isEncoder) {
+ format->setInt32("encoder", 1);
+ }
+ ALOGV("doProfileCodecs %s %s %s %s",
+ name.c_str(), mime.c_str(), isEncoder ? "encoder" : "decoder",
+ format->debugString().c_str());
+
+ status_t err = OK;
+ Vector<sp<MediaCodec>> codecs;
+ while (err == OK && codecs.size() < kMaxInstances) {
+ sp<ALooper> looper = new ALooper;
+ looper->setName("MediaCodec_looper");
+ ALOGV("doProfileCodecs for codec #%u", codecs.size());
+ ALOGV("doProfileCodecs start looper");
+ looper->start(
+ false /* runOnCallingThread */, false /* canCallJava */, ANDROID_PRIORITY_AUDIO);
+ ALOGV("doProfileCodecs CreateByComponentName");
+ sp<MediaCodec> codec = MediaCodec::CreateByComponentName(looper, name.c_str(), &err);
+ if (err != OK) {
+ ALOGV("Failed to create codec: %s", name.c_str());
+ break;
+ }
+ const sp<Surface> nativeWindow;
+ const sp<ICrypto> crypto;
+ uint32_t flags = 0;
+ ALOGV("doProfileCodecs configure");
+ err = codec->configure(format, nativeWindow, crypto, flags);
+ if (err != OK) {
+ ALOGV("Failed to configure codec: %s with mime: %s", name.c_str(), mime.c_str());
+ codec->release();
+ break;
+ }
+ ALOGV("doProfileCodecs start");
+ err = codec->start();
+ if (err != OK) {
+ ALOGV("Failed to start codec: %s with mime: %s", name.c_str(), mime.c_str());
+ codec->release();
+ break;
+ }
+ codecs.push_back(codec);
+ }
+
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ ALOGV("doProfileCodecs release %s", name.c_str());
+ err = codecs[i]->release();
+ if (err != OK) {
+ ALOGE("Failed to release codec: %s with mime: %s", name.c_str(), mime.c_str());
+ }
+ }
+
+ return codecs.size();
+}
+
+static void printLongString(const char *buf, size_t size) {
+ AString print;
+ const char *start = buf;
+ size_t len;
+ size_t totalLen = size;
+ while (totalLen > 0) {
+ len = (totalLen > 500) ? 500 : totalLen;
+ print.setTo(start, len);
+ ALOGV("%s", print.c_str());
+ totalLen -= len;
+ start += len;
+ }
+}
+
+bool splitString(const AString &s, const AString &delimiter, AString *s1, AString *s2) {
+ ssize_t pos = s.find(delimiter.c_str());
+ if (pos < 0) {
+ return false;
+ }
+ *s1 = AString(s, 0, pos);
+ *s2 = AString(s, pos + 1, s.size() - pos - 1);
+ return true;
+}
+
+bool splitString(
+ const AString &s, const AString &delimiter, AString *s1, AString *s2, AString *s3) {
+ AString temp;
+ if (!splitString(s, delimiter, s1, &temp)) {
+ return false;
+ }
+ if (!splitString(temp, delimiter, s2, s3)) {
+ return false;
+ }
+ return true;
+}
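As a hedged sketch of how these helpers are meant to be used with the profiling keys built below (the codec name is a made-up placeholder):

    AString name, mime, type;
    // keys have the form "<codec name> <mime> <encoder|decoder>"
    if (splitString(AString("OMX.vendor.decoder.avc video/avc decoder"), " ",
            &name, &mime, &type)) {
        // name == "OMX.vendor.decoder.avc", mime == "video/avc", type == "decoder"
    }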
+
+void profileCodecs(
+ const Vector<sp<MediaCodecInfo>> &infos,
+ KeyedVector<AString, CodecSettings> *results,
+ bool forceToMeasure) {
+ KeyedVector<AString, sp<MediaCodecInfo::Capabilities>> codecsNeedMeasure;
+ for (size_t i = 0; i < infos.size(); ++i) {
+ const sp<MediaCodecInfo> info = infos[i];
+ AString name = info->getCodecName();
+ if (name.startsWith("OMX.google.") ||
+ // TODO: reenable below codecs once fixed
+ name == "OMX.Intel.VideoDecoder.VP9.hybrid") {
+ continue;
+ }
+
+ Vector<AString> mimes;
+ info->getSupportedMimes(&mimes);
+ for (size_t i = 0; i < mimes.size(); ++i) {
+ const sp<MediaCodecInfo::Capabilities> &caps =
+ info->getCapabilitiesFor(mimes[i].c_str());
+ if (!forceToMeasure && caps->getDetails()->contains("max-supported-instances")) {
+ continue;
+ }
+
+ size_t max = doProfileCodecs(info->isEncoder(), name, mimes[i], caps);
+ if (max > 0) {
+ CodecSettings settings;
+ char maxStr[32];
+ sprintf(maxStr, "%u", max);
+ settings.add("max-supported-instances", maxStr);
+
+ AString key = name;
+ key.append(" ");
+ key.append(mimes[i]);
+ key.append(" ");
+ key.append(info->isEncoder() ? "encoder" : "decoder");
+ results->add(key, settings);
+ }
+ }
+ }
+}
+
+void applyCodecSettings(
+ const AString& codecInfo,
+ const CodecSettings &settings,
+ Vector<sp<MediaCodecInfo>> *infos) {
+ AString name;
+ AString mime;
+ AString type;
+ if (!splitString(codecInfo, " ", &name, &mime, &type)) {
+ return;
+ }
+
+ for (size_t i = 0; i < infos->size(); ++i) {
+ const sp<MediaCodecInfo> &info = infos->itemAt(i);
+ if (name != info->getCodecName()) {
+ continue;
+ }
+
+ Vector<AString> mimes;
+ info->getSupportedMimes(&mimes);
+ for (size_t j = 0; j < mimes.size(); ++j) {
+ if (mimes[j] != mime) {
+ continue;
+ }
+ const sp<MediaCodecInfo::Capabilities> &caps = info->getCapabilitiesFor(mime.c_str());
+ for (size_t k = 0; k < settings.size(); ++k) {
+ caps->getDetails()->setString(
+ settings.keyAt(k).c_str(), settings.valueAt(k).c_str());
+ }
+ }
+ }
+}
+
+void exportResultsToXML(const char *fileName, const KeyedVector<AString, CodecSettings>& results) {
+#if LOG_NDEBUG == 0
+ ALOGE("measurement results");
+ for (size_t i = 0; i < results.size(); ++i) {
+ ALOGE("key %s", results.keyAt(i).c_str());
+ const CodecSettings &settings = results.valueAt(i);
+ for (size_t j = 0; j < settings.size(); ++j) {
+ ALOGE("name %s value %s", settings.keyAt(j).c_str(), settings.valueAt(j).c_str());
+ }
+ }
+#endif
+
+ AString overrides;
+ FILE *f = fopen(fileName, "rb");
+ if (f != NULL) {
+ fseek(f, 0, SEEK_END);
+ long size = ftell(f);
+ rewind(f);
+
+ char *buf = (char *)malloc(size);
+ if (fread(buf, size, 1, f) == 1) {
+ overrides.setTo(buf, size);
+#if LOG_NDEBUG == 0
+ ALOGV("Existing overrides:");
+ printLongString(buf, size);
+#endif
+ } else {
+ ALOGE("Failed to read %s", fileName);
+ }
+ fclose(f);
+ free(buf);
+ }
+
+ for (size_t i = 0; i < results.size(); ++i) {
+ AString name;
+ AString mime;
+ AString type;
+ if (!splitString(results.keyAt(i), " ", &name, &mime, &type)) {
+ continue;
+ }
+ name = AStringPrintf("\"%s\"", name.c_str());
+ mime = AStringPrintf("\"%s\"", mime.c_str());
+ ALOGV("name(%s) mime(%s) type(%s)", name.c_str(), mime.c_str(), type.c_str());
+ ssize_t posCodec = overrides.find(name.c_str());
+ size_t posInsert = 0;
+ if (posCodec < 0) {
+ AString encodersDecoders = (type == "encoder") ? "<Encoders>" : "<Decoders>";
+ AString encodersDecodersEnd = (type == "encoder") ? "</Encoders>" : "</Decoders>";
+ ssize_t posEncodersDecoders = overrides.find(encodersDecoders.c_str());
+ if (posEncodersDecoders < 0) {
+ AString mediaCodecs = "<MediaCodecs>";
+ ssize_t posMediaCodec = overrides.find(mediaCodecs.c_str());
+ if (posMediaCodec < 0) {
+ posMediaCodec = overrides.size();
+ overrides.insert("\n<MediaCodecs>\n</MediaCodecs>\n", posMediaCodec);
+ posMediaCodec = overrides.find(mediaCodecs.c_str(), posMediaCodec);
+ }
+ posEncodersDecoders = posMediaCodec + mediaCodecs.size();
+ AString codecs = AStringPrintf(
+ "\n %s\n %s", encodersDecoders.c_str(), encodersDecodersEnd.c_str());
+ overrides.insert(codecs.c_str(), posEncodersDecoders);
+ posEncodersDecoders = overrides.find(encodersDecoders.c_str(), posEncodersDecoders);
+ }
+ posCodec = posEncodersDecoders + encodersDecoders.size();
+ AString codec = AStringPrintf(
+ "\n <MediaCodec name=%s type=%s update=\"true\" >\n </MediaCodec>",
+ name.c_str(),
+ mime.c_str());
+ overrides.insert(codec.c_str(), posCodec);
+ posCodec = overrides.find(name.c_str());
+ }
+
+ // insert to existing entry
+ ssize_t posMime = overrides.find(mime.c_str(), posCodec);
+ ssize_t posEnd = overrides.find(">", posCodec);
+ if (posEnd < 0) {
+ ALOGE("Format error in overrides file.");
+ return;
+ }
+ if (posMime < 0 || posMime > posEnd) {
+ // new mime for an existing component
+ AString codecEnd = "</MediaCodec>";
+ posInsert = overrides.find(codecEnd.c_str(), posCodec) + codecEnd.size();
+ AString codec = AStringPrintf(
+ "\n <MediaCodec name=%s type=%s update=\"true\" >\n </MediaCodec>",
+ name.c_str(),
+ mime.c_str());
+ overrides.insert(codec.c_str(), posInsert);
+ posInsert = overrides.find(">", posInsert) + 1;
+ } else {
+ posInsert = posEnd + 1;
+ }
+
+ CodecSettings settings = results.valueAt(i);
+ for (size_t i = 0; i < settings.size(); ++i) {
+ // WARNING: we assume all the settings are "Limit". Currently we have only one type
+ // of setting in this case, which is "max-supported-instances".
+ AString strInsert = AStringPrintf(
+ "\n <Limit name=\"%s\" value=\"%s\" />",
+ settings.keyAt(i).c_str(),
+ settings.valueAt(i).c_str());
+ overrides.insert(strInsert, posInsert);
+ }
+ }
+
+#if LOG_NDEBUG == 0
+ ALOGV("New overrides:");
+ printLongString(overrides.c_str(), overrides.size());
+#endif
+
+ f = fopen(fileName, "wb");
+ if (f == NULL) {
+ ALOGE("Failed to open %s for writing.", fileName);
+ return;
+ }
+ if (fwrite(overrides.c_str(), 1, overrides.size(), f) != overrides.size()) {
+ ALOGE("Failed to write to %s.", fileName);
+ }
+ fclose(f);
+}
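Putting the string building above together, the generated overrides file ends up looking roughly like this (codec name and measured value are illustrative placeholders):

    <MediaCodecs>
        <Decoders>
            <MediaCodec name="OMX.vendor.decoder.avc" type="video/avc" update="true" >
                <Limit name="max-supported-instances" value="4" />
            </MediaCodec>
        </Decoders>
    </MediaCodecs>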
+
+} // namespace android
diff --git a/media/libstagefright/MediaCodecListOverrides.h b/media/libstagefright/MediaCodecListOverrides.h
new file mode 100644
index 0000000..f97ce63
--- /dev/null
+++ b/media/libstagefright/MediaCodecListOverrides.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CODEC_LIST_OVERRIDES_H_
+
+#define MEDIA_CODEC_LIST_OVERRIDES_H_
+
+#include <media/MediaCodecInfo.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <utils/StrongPointer.h>
+#include <utils/KeyedVector.h>
+
+namespace android {
+
+class MediaCodecInfo;
+
+bool splitString(const AString &s, const AString &delimiter, AString *s1, AString *s2);
+
+bool splitString(
+ const AString &s, const AString &delimiter, AString *s1, AString *s2, AString *s3);
+
+void profileCodecs(
+ const Vector<sp<MediaCodecInfo>> &infos,
+ KeyedVector<AString, CodecSettings> *results,
+ bool forceToMeasure = false); // forceToMeasure is mainly for testing
+
+void applyCodecSettings(
+ const AString& codecInfo,
+ const CodecSettings &settings,
+ Vector<sp<MediaCodecInfo>> *infos);
+
+void exportResultsToXML(const char *fileName, const KeyedVector<AString, CodecSettings>& results);
+
+} // namespace android
+
+#endif // MEDIA_CODEC_LIST_OVERRIDES_H_
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index c26e909..6568d25 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -121,7 +121,7 @@ status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
mLooper->registerHandler(this);
mNotify = notify;
- sp<AMessage> msg = new AMessage(kWhatStart, id());
+ sp<AMessage> msg = new AMessage(kWhatStart, this);
msg->setObject("meta", meta);
return postSynchronouslyAndReturnError(msg);
}
@@ -137,19 +137,19 @@ void MediaCodecSource::Puller::stop() {
mSource->stop();
ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");
- (new AMessage(kWhatStop, id()))->post();
+ (new AMessage(kWhatStop, this))->post();
}
void MediaCodecSource::Puller::pause() {
- (new AMessage(kWhatPause, id()))->post();
+ (new AMessage(kWhatPause, this))->post();
}
void MediaCodecSource::Puller::resume() {
- (new AMessage(kWhatResume, id()))->post();
+ (new AMessage(kWhatResume, this))->post();
}
void MediaCodecSource::Puller::schedulePull() {
- sp<AMessage> msg = new AMessage(kWhatPull, id());
+ sp<AMessage> msg = new AMessage(kWhatPull, this);
msg->setInt32("generation", mPullGeneration);
msg->post();
}
@@ -182,7 +182,7 @@ void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
response->postReply(replyID);
break;
@@ -269,13 +269,13 @@ sp<MediaCodecSource> MediaCodecSource::Create(
}
status_t MediaCodecSource::start(MetaData* params) {
- sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
msg->setObject("meta", params);
return postSynchronouslyAndReturnError(msg);
}
status_t MediaCodecSource::stop() {
- sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
status_t err = postSynchronouslyAndReturnError(msg);
// mPuller->stop() needs to be done outside MediaCodecSource's looper,
@@ -294,7 +294,7 @@ status_t MediaCodecSource::stop() {
}
status_t MediaCodecSource::pause() {
- (new AMessage(kWhatPause, mReflector->id()))->post();
+ (new AMessage(kWhatPause, mReflector))->post();
return OK;
}
@@ -399,6 +399,9 @@ status_t MediaCodecSource::initEncoder() {
ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
+ mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
+ mEncoder->setCallback(mEncoderActivityNotify);
+
status_t err = mEncoder->configure(
mOutputFormat,
NULL /* nativeWindow */,
@@ -422,10 +425,6 @@ status_t MediaCodecSource::initEncoder() {
}
}
- mEncoderActivityNotify = new AMessage(
- kWhatEncoderActivity, mReflector->id());
- mEncoder->setCallback(mEncoderActivityNotify);
-
err = mEncoder->start();
if (err != OK) {
@@ -492,7 +491,7 @@ void MediaCodecSource::signalEOS(status_t err) {
if (mStopping && mEncoderReachedEOS) {
ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
// posting reply to everyone that's waiting
- List<uint32_t>::iterator it;
+ List<sp<AReplyToken>>::iterator it;
for (it = mStopReplyIDQueue.begin();
it != mStopReplyIDQueue.end(); it++) {
(new AMessage)->postReply(*it);
@@ -620,8 +619,7 @@ status_t MediaCodecSource::onStart(MetaData *params) {
resume(startTimeUs);
} else {
CHECK(mPuller != NULL);
- sp<AMessage> notify = new AMessage(
- kWhatPullerNotify, mReflector->id());
+ sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
err = mPuller->start(params, notify);
if (err != OK) {
return err;
@@ -768,7 +766,7 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
}
case kWhatStart:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
sp<RefBase> obj;
@@ -784,7 +782,7 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
{
ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mEncoderReachedEOS) {
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index c7c6f34..b13877d 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -38,21 +38,6 @@
namespace android {
-MediaMuxer::MediaMuxer(const char *path, OutputFormat format)
- : mFormat(format),
- mState(UNINITIALIZED) {
- if (format == OUTPUT_FORMAT_MPEG_4) {
- mWriter = new MPEG4Writer(path);
- } else if (format == OUTPUT_FORMAT_WEBM) {
- mWriter = new WebmWriter(path);
- }
-
- if (mWriter != NULL) {
- mFileMeta = new MetaData;
- mState = INITIALIZED;
- }
-}
-
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
: mFormat(format),
mState(UNINITIALIZED) {
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
new file mode 100644
index 0000000..7b6c7d9
--- /dev/null
+++ b/media/libstagefright/MediaSync.cpp
@@ -0,0 +1,541 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSync"
+#include <inttypes.h>
+
+#include <gui/BufferQueue.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+
+#include <media/AudioTrack.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <ui/GraphicBuffer.h>
+
+// Maximum late time allowed for a video frame to be rendered. When a video
+// frame arrives later than this number, it will be discarded without rendering.
+static const int64_t kMaxAllowedVideoLateTimeUs = 40000ll;
+
+namespace android {
+
+// static
+sp<MediaSync> MediaSync::create() {
+ sp<MediaSync> sync = new MediaSync();
+ sync->mLooper->registerHandler(sync);
+ return sync;
+}
+
+MediaSync::MediaSync()
+ : mIsAbandoned(false),
+ mMutex(),
+ mReleaseCondition(),
+ mNumOutstandingBuffers(0),
+ mNativeSampleRateInHz(0),
+ mNumFramesWritten(0),
+ mHasAudio(false),
+ mNextBufferItemMediaUs(-1),
+ mPlaybackRate(0.0) {
+ mMediaClock = new MediaClock;
+
+ mLooper = new ALooper;
+ mLooper->setName("MediaSync");
+ mLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+MediaSync::~MediaSync() {
+ if (mInput != NULL) {
+ mInput->consumerDisconnect();
+ }
+ if (mOutput != NULL) {
+ mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
+ }
+
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ }
+}
+
+status_t MediaSync::configureSurface(const sp<IGraphicBufferProducer> &output) {
+ Mutex::Autolock lock(mMutex);
+
+ // TODO: support surface change.
+ if (mOutput != NULL) {
+ ALOGE("configureSurface: output surface has already been configured.");
+ return INVALID_OPERATION;
+ }
+
+ if (output != NULL) {
+ IGraphicBufferProducer::QueueBufferOutput queueBufferOutput;
+ sp<OutputListener> listener(new OutputListener(this));
+ IInterface::asBinder(output)->linkToDeath(listener);
+ status_t status =
+ output->connect(listener,
+ NATIVE_WINDOW_API_MEDIA,
+ true /* producerControlledByApp */,
+ &queueBufferOutput);
+ if (status != NO_ERROR) {
+ ALOGE("configureSurface: failed to connect (%d)", status);
+ return status;
+ }
+
+ mOutput = output;
+ }
+
+ return NO_ERROR;
+}
+
+// |audioTrack| is used only for querying information.
+status_t MediaSync::configureAudioTrack(
+ const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz) {
+ Mutex::Autolock lock(mMutex);
+
+ // TODO: support audio track change.
+ if (mAudioTrack != NULL) {
+ ALOGE("configureAudioTrack: audioTrack has already been configured.");
+ return INVALID_OPERATION;
+ }
+
+ mAudioTrack = audioTrack;
+ mNativeSampleRateInHz = nativeSampleRateInHz;
+
+ return NO_ERROR;
+}
+
+status_t MediaSync::createInputSurface(
+ sp<IGraphicBufferProducer> *outBufferProducer) {
+ if (outBufferProducer == NULL) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
+ if (mOutput == NULL) {
+ return NO_INIT;
+ }
+
+ if (mInput != NULL) {
+ return INVALID_OPERATION;
+ }
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ sp<IGraphicBufferConsumer> bufferConsumer;
+ BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer);
+
+ sp<InputListener> listener(new InputListener(this));
+ IInterface::asBinder(bufferConsumer)->linkToDeath(listener);
+ status_t status =
+ bufferConsumer->consumerConnect(listener, false /* controlledByApp */);
+ if (status == NO_ERROR) {
+ bufferConsumer->setConsumerName(String8("MediaSync"));
+ *outBufferProducer = bufferProducer;
+ mInput = bufferConsumer;
+ }
+ return status;
+}
+
+status_t MediaSync::setPlaybackRate(float rate) {
+ if (rate < 0.0) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
+ if (rate > mPlaybackRate) {
+ mNextBufferItemMediaUs = -1;
+ }
+ mPlaybackRate = rate;
+ mMediaClock->setPlaybackRate(rate);
+ onDrainVideo_l();
+
+ return OK;
+}
+
+sp<const MediaClock> MediaSync::getMediaClock() {
+ return mMediaClock;
+}
+
+status_t MediaSync::updateQueuedAudioData(
+ size_t sizeInBytes, int64_t presentationTimeUs) {
+ if (sizeInBytes == 0) {
+ return OK;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
+ if (mAudioTrack == NULL) {
+ ALOGW("updateQueuedAudioData: audioTrack has NOT been configured.");
+ return INVALID_OPERATION;
+ }
+
+ int64_t numFrames = sizeInBytes / mAudioTrack->frameSize();
+ int64_t maxMediaTimeUs = presentationTimeUs
+ + getDurationIfPlayedAtNativeSampleRate_l(numFrames);
+ mNumFramesWritten += numFrames;
+
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t nowMediaUs = maxMediaTimeUs
+ - getDurationIfPlayedAtNativeSampleRate_l(mNumFramesWritten)
+ + getPlayedOutAudioDurationMedia_l(nowUs);
+
+ int64_t oldRealTime = -1;
+ if (mNextBufferItemMediaUs != -1) {
+ oldRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
+ }
+
+ mMediaClock->updateAnchor(nowMediaUs, nowUs, maxMediaTimeUs);
+ mHasAudio = true;
+
+ if (oldRealTime != -1) {
+ int64_t newRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
+ if (newRealTime < oldRealTime) {
+ mNextBufferItemMediaUs = -1;
+ onDrainVideo_l();
+ }
+ }
+
+ return OK;
+}
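+// Worked example (illustrative, not part of this change): at a native sample
+// rate of 48000 Hz, queueing the equivalent of 4800 frames advances
+// maxMediaTimeUs by 4800 * 1000000 / 48000 = 100000 us (100 ms) past
+// presentationTimeUs.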
+
+void MediaSync::setName(const AString &name) {
+ Mutex::Autolock lock(mMutex);
+ mInput->setConsumerName(String8(name.c_str()));
+}
+
+int64_t MediaSync::getRealTime(int64_t mediaTimeUs, int64_t nowUs) {
+ int64_t realUs;
+ if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
+ // If we failed to get the current position, e.g. because the audio
+ // clock is not ready, just play out the video immediately without delay.
+ return nowUs;
+ }
+ return realUs;
+}
+
+int64_t MediaSync::getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames) {
+ return (numFrames * 1000000LL / mNativeSampleRateInHz);
+}
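+// Illustrative check: at a native rate of 48000 Hz, 48000 frames map to
+// 48000 * 1000000 / 48000 = 1000000 us, i.e. exactly one second; the helper
+// above is simply a frames-to-microseconds conversion at the track's native
+// sample rate.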
+
+int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) {
+ CHECK(mAudioTrack != NULL);
+
+ uint32_t numFramesPlayed;
+ int64_t numFramesPlayedAt;
+ AudioTimestamp ts;
+ static const int64_t kStaleTimestamp100ms = 100000;
+
+ status_t res = mAudioTrack->getTimestamp(ts);
+ if (res == OK) {
+ // case 1: mixing audio tracks.
+ numFramesPlayed = ts.mPosition;
+ numFramesPlayedAt =
+ ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+ const int64_t timestampAge = nowUs - numFramesPlayedAt;
+ if (timestampAge > kStaleTimestamp100ms) {
+ // FIXME (audio): work around potentially stale timestamps.
+ // getTimestamp returns a timestamp which may come from audio
+ // mixing threads. After pausing, the MixerThread may go idle,
+ // thus the mTime estimate may become stale. Assuming that the
+ // MixerThread runs 20ms, with FastMixer at 5ms, the max latency
+ // should be about 25ms with an average around 12ms (to be
+ // verified). For safety we use 100ms.
+ ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) "
+ "numFramesPlayedAt(%lld)",
+ (long long)nowUs, (long long)numFramesPlayedAt);
+ numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
+ }
+ //ALOGD("getTimestamp: OK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else if (res == WOULD_BLOCK) {
+ // case 2: transitory state on start of a new track
+ numFramesPlayed = 0;
+ numFramesPlayedAt = nowUs;
+ //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else {
+ // case 3: transitory at the start of a new track, or audio fast tracks.
+ res = mAudioTrack->getPosition(&numFramesPlayed);
+ CHECK_EQ(res, (status_t)OK);
+ numFramesPlayedAt = nowUs;
+ numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ }
+
+ // numFramesPlayed cannot wrap negative until about 12.4 hours of playback.
+ //CHECK_EQ(numFramesPlayed & (1 << 31), 0);
+ int64_t durationUs =
+ getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed)
+ + nowUs - numFramesPlayedAt;
+ if (durationUs < 0) {
+ // Occurs when numFramesPlayed position is very small and the following:
+ // (1) In case 1, the time nowUs is computed before getTimestamp() is
+ // called and numFramesPlayedAt is greater than nowUs by time more
+ // than numFramesPlayed.
+ // (2) In case 3, using getPosition and adding mAudioTrack->latency()
+ // to numFramesPlayedAt, by a time amount greater than
+ // numFramesPlayed.
+ //
+ // Both of these are transitory conditions.
+ ALOGV("getPlayedOutAudioDurationMedia_l: negative duration %lld "
+ "set to zero", (long long)durationUs);
+ durationUs = 0;
+ }
+ ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) "
+ "framesAt(%lld)",
+ (long long)durationUs, (long long)nowUs, numFramesPlayed,
+ (long long)numFramesPlayedAt);
+ return durationUs;
+}
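+// Worked example (illustrative): if the track reports numFramesPlayed = 96000
+// at 48000 Hz and the timestamp was taken 10 ms before nowUs, the played-out
+// duration is 96000 * 1000000 / 48000 + 10000 = 2010000 us, i.e. about 2.01 s.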
+
+void MediaSync::onDrainVideo_l() {
+ if (!isPlaying()) {
+ return;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+
+ while (!mBufferItems.empty()) {
+ BufferItem *bufferItem = &*mBufferItems.begin();
+ int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
+ int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);
+ if (itemRealUs <= nowUs) {
+ if (mHasAudio) {
+ if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
+ renderOneBufferItem_l(*bufferItem);
+ } else {
+ // too late.
+ returnBufferToInput_l(
+ bufferItem->mGraphicBuffer, bufferItem->mFence);
+ }
+ } else {
+ // always render video buffer in video-only mode.
+ renderOneBufferItem_l(*bufferItem);
+
+ // smooth out video at frame rates >= 10 fps
+ mMediaClock->updateAnchor(
+ itemMediaUs, nowUs, itemMediaUs + 100000);
+ }
+
+ mBufferItems.erase(mBufferItems.begin());
+
+ if (mBufferItems.empty()) {
+ mNextBufferItemMediaUs = -1;
+ }
+ } else {
+ if (mNextBufferItemMediaUs == -1
+ || mNextBufferItemMediaUs != itemMediaUs) {
+ sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
+ msg->post(itemRealUs - nowUs);
+ }
+ break;
+ }
+ }
+}
+
+void MediaSync::onFrameAvailableFromInput() {
+ Mutex::Autolock lock(mMutex);
+
+ // If there are too many outstanding buffers, wait until a buffer is
+ // released back to the input in onBufferReleased.
+ while (mNumOutstandingBuffers >= MAX_OUTSTANDING_BUFFERS) {
+ mReleaseCondition.wait(mMutex);
+
+ // If the sync is abandoned while we are waiting, the release
+ // condition variable will be broadcast, and we should just return
+ // without attempting to do anything more (since the input queue will
+ // also be abandoned).
+ if (mIsAbandoned) {
+ return;
+ }
+ }
+ ++mNumOutstandingBuffers;
+
+ // Acquire and detach the buffer from the input.
+ BufferItem bufferItem;
+ status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */);
+ if (status != NO_ERROR) {
+ ALOGE("acquiring buffer from input failed (%d)", status);
+ return;
+ }
+
+ ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId());
+
+ status = mInput->detachBuffer(bufferItem.mBuf);
+ if (status != NO_ERROR) {
+ ALOGE("detaching buffer from input failed (%d)", status);
+ if (status == NO_INIT) {
+ // If the input has been abandoned, move on.
+ onAbandoned_l(true /* isInput */);
+ }
+ return;
+ }
+
+ mBufferItems.push_back(bufferItem);
+ onDrainVideo_l();
+}
+
+void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem) {
+ IGraphicBufferProducer::QueueBufferInput queueInput(
+ bufferItem.mTimestamp,
+ bufferItem.mIsAutoTimestamp,
+ bufferItem.mDataSpace,
+ bufferItem.mCrop,
+ static_cast<int32_t>(bufferItem.mScalingMode),
+ bufferItem.mTransform,
+ bufferItem.mIsDroppable,
+ bufferItem.mFence);
+
+ // Attach and queue the buffer to the output.
+ int slot;
+ status_t status = mOutput->attachBuffer(&slot, bufferItem.mGraphicBuffer);
+ ALOGE_IF(status != NO_ERROR, "attaching buffer to output failed (%d)", status);
+ if (status == NO_ERROR) {
+ IGraphicBufferProducer::QueueBufferOutput queueOutput;
+ status = mOutput->queueBuffer(slot, queueInput, &queueOutput);
+ ALOGE_IF(status != NO_ERROR, "queueing buffer to output failed (%d)", status);
+ }
+
+ if (status != NO_ERROR) {
+ returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
+ if (status == NO_INIT) {
+ // If the output has been abandoned, move on.
+ onAbandoned_l(false /* isInput */);
+ }
+ return;
+ }
+
+ ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
+}
+
+void MediaSync::onBufferReleasedByOutput() {
+ Mutex::Autolock lock(mMutex);
+
+ sp<GraphicBuffer> buffer;
+ sp<Fence> fence;
+ status_t status = mOutput->detachNextBuffer(&buffer, &fence);
+ ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
+
+ if (status == NO_INIT) {
+ // If the output has been abandoned, we can't do anything else,
+ // since buffer is invalid.
+ onAbandoned_l(false /* isInput */);
+ return;
+ }
+
+ ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
+
+ // If we've been abandoned, we can't return the buffer to the input, so just
+ // move on.
+ if (mIsAbandoned) {
+ return;
+ }
+
+ returnBufferToInput_l(buffer, fence);
+}
+
+void MediaSync::returnBufferToInput_l(
+ const sp<GraphicBuffer> &buffer, const sp<Fence> &fence) {
+ // Attach and release the buffer back to the input.
+ int consumerSlot;
+ status_t status = mInput->attachBuffer(&consumerSlot, buffer);
+ ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status);
+ if (status == NO_ERROR) {
+ status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */,
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
+ ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status);
+ }
+
+ if (status != NO_ERROR) {
+ // TODO: do we need to try to return this buffer later?
+ return;
+ }
+
+ ALOGV("released buffer %#llx to input", (long long)buffer->getId());
+
+ // Notify any waiting onFrameAvailable calls.
+ --mNumOutstandingBuffers;
+ mReleaseCondition.signal();
+}
+
+void MediaSync::onAbandoned_l(bool isInput) {
+ ALOGE("the %s has abandoned me", (isInput ? "input" : "output"));
+ if (!mIsAbandoned) {
+ if (isInput) {
+ mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
+ } else {
+ mInput->consumerDisconnect();
+ }
+ mIsAbandoned = true;
+ }
+ mReleaseCondition.broadcast();
+}
+
+void MediaSync::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatDrainVideo:
+ {
+ Mutex::Autolock lock(mMutex);
+ onDrainVideo_l();
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+MediaSync::InputListener::InputListener(const sp<MediaSync> &sync)
+ : mSync(sync) {}
+
+MediaSync::InputListener::~InputListener() {}
+
+void MediaSync::InputListener::onFrameAvailable(const BufferItem &/* item */) {
+ mSync->onFrameAvailableFromInput();
+}
+
+// We don't care about sideband streams, since we won't relay them.
+void MediaSync::InputListener::onSidebandStreamChanged() {
+ ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly.");
+}
+
+
+void MediaSync::InputListener::binderDied(const wp<IBinder> &/* who */) {
+ Mutex::Autolock lock(mSync->mMutex);
+ mSync->onAbandoned_l(true /* isInput */);
+}
+
+MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync)
+ : mSync(sync) {}
+
+MediaSync::OutputListener::~OutputListener() {}
+
+void MediaSync::OutputListener::onBufferReleased() {
+ mSync->onBufferReleasedByOutput();
+}
+
+void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) {
+ Mutex::Autolock lock(mSync->mMutex);
+ mSync->onAbandoned_l(false /* isInput */);
+}
+
+} // namespace android
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 7d7d631..8d70e50 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -226,7 +226,7 @@ NuCachedSource2::NuCachedSource2(
mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);
Mutex::Autolock autoLock(mLock);
- (new AMessage(kWhatFetchMore, mReflector->id()))->post();
+ (new AMessage(kWhatFetchMore, mReflector))->post();
}
NuCachedSource2::~NuCachedSource2() {
@@ -433,7 +433,7 @@ void NuCachedSource2::onFetch() {
delayUs = 100000ll;
}
- (new AMessage(kWhatFetchMore, mReflector->id()))->post(delayUs);
+ (new AMessage(kWhatFetchMore, mReflector))->post(delayUs);
}
void NuCachedSource2::onRead(const sp<AMessage> &msg) {
@@ -522,7 +522,7 @@ ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
return size;
}
- sp<AMessage> msg = new AMessage(kWhatRead, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatRead, mReflector);
msg->setInt64("offset", offset);
msg->setPointer("data", data);
msg->setSize("size", size);
diff --git a/media/libstagefright/ProcessInfo.cpp b/media/libstagefright/ProcessInfo.cpp
new file mode 100644
index 0000000..b4172b3
--- /dev/null
+++ b/media/libstagefright/ProcessInfo.cpp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ProcessInfo"
+#include <utils/Log.h>
+
+#include <media/stagefright/ProcessInfo.h>
+
+#include <binder/IProcessInfoService.h>
+#include <binder/IServiceManager.h>
+
+namespace android {
+
+ProcessInfo::ProcessInfo() {}
+
+bool ProcessInfo::getPriority(int pid, int* priority) {
+ sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo"));
+ sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
+
+ size_t length = 1;
+ int32_t states;
+ status_t err = service->getProcessStatesFromPids(length, &pid, &states);
+ if (err != OK) {
+ ALOGE("getProcessStatesFromPids failed");
+ return false;
+ }
+ ALOGV("pid %d states %d", pid, states);
+ if (states < 0) {
+ return false;
+ }
+
+ // Use the process state as the priority: the lower the value, the higher the priority.
+ *priority = states;
+ return true;
+}
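+// Minimal usage sketch (illustrative only; querying the calling process is an
+// assumption, any valid pid works):
+//   ProcessInfo info;
+//   int priority;
+//   if (info.getPriority(getpid(), &priority)) {
+//       ALOGV("process state used as priority: %d", priority);
+//   }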
+
+ProcessInfo::~ProcessInfo() {}
+
+} // namespace android
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 6030236..aba64d5 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -234,7 +234,9 @@ status_t SampleTable::setSampleToChunkParams(
return ERROR_OUT_OF_RANGE;
mSampleToChunkEntries =
- new SampleToChunkEntry[mNumSampleToChunkOffsets];
+ new (std::nothrow) SampleToChunkEntry[mNumSampleToChunkOffsets];
+ if (!mSampleToChunkEntries)
+ return ERROR_OUT_OF_RANGE;
for (uint32_t i = 0; i < mNumSampleToChunkOffsets; ++i) {
uint8_t buffer[12];
@@ -337,7 +339,9 @@ status_t SampleTable::setTimeToSampleParams(
if (allocSize > SIZE_MAX) {
return ERROR_OUT_OF_RANGE;
}
- mTimeToSample = new uint32_t[mTimeToSampleCount * 2];
+ mTimeToSample = new (std::nothrow) uint32_t[mTimeToSampleCount * 2];
+ if (!mTimeToSample)
+ return ERROR_OUT_OF_RANGE;
size_t size = sizeof(uint32_t) * mTimeToSampleCount * 2;
if (mDataSource->readAt(
@@ -384,7 +388,9 @@ status_t SampleTable::setCompositionTimeToSampleParams(
return ERROR_OUT_OF_RANGE;
}
- mCompositionTimeDeltaEntries = new uint32_t[2 * numEntries];
+ mCompositionTimeDeltaEntries = new (std::nothrow) uint32_t[2 * numEntries];
+ if (!mCompositionTimeDeltaEntries)
+ return ERROR_OUT_OF_RANGE;
if (mDataSource->readAt(
data_offset + 8, mCompositionTimeDeltaEntries, numEntries * 8)
@@ -434,7 +440,10 @@ status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size)
return ERROR_OUT_OF_RANGE;
}
- mSyncSamples = new uint32_t[mNumSyncSamples];
+ mSyncSamples = new (std::nothrow) uint32_t[mNumSyncSamples];
+ if (!mSyncSamples)
+ return ERROR_OUT_OF_RANGE;
+
size_t size = mNumSyncSamples * sizeof(uint32_t);
if (mDataSource->readAt(mSyncSampleOffset + 8, mSyncSamples, size)
!= (ssize_t)size) {
@@ -502,7 +511,9 @@ void SampleTable::buildSampleEntriesTable() {
return;
}
- mSampleTimeEntries = new SampleTimeEntry[mNumSampleSizes];
+ mSampleTimeEntries = new (std::nothrow) SampleTimeEntry[mNumSampleSizes];
+ if (!mSampleTimeEntries)
+ return;
uint32_t sampleIndex = 0;
uint32_t sampleTime = 0;
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 101fc8a..820b2fc 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -519,6 +519,12 @@ void StagefrightMetadataRetriever::parseMetaData() {
mMetaData.add(METADATA_KEY_NUM_TRACKS, String8(tmp));
+ float captureFps;
+ if (meta->findFloat(kKeyCaptureFramerate, &captureFps)) {
+ sprintf(tmp, "%f", captureFps);
+ mMetaData.add(METADATA_KEY_CAPTURE_FRAMERATE, String8(tmp));
+ }
+
bool hasAudio = false;
bool hasVideo = false;
int32_t videoWidth = -1;
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index b3a79a0..dfe8ad1 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -166,6 +166,16 @@ status_t convertMetaDataToMessage(
msg->setInt32("max-input-size", maxInputSize);
}
+ int32_t maxWidth;
+ if (meta->findInt32(kKeyMaxWidth, &maxWidth)) {
+ msg->setInt32("max-width", maxWidth);
+ }
+
+ int32_t maxHeight;
+ if (meta->findInt32(kKeyMaxHeight, &maxHeight)) {
+ msg->setInt32("max-height", maxHeight);
+ }
+
int32_t rotationDegrees;
if (meta->findInt32(kKeyRotation, &rotationDegrees)) {
msg->setInt32("rotation-degrees", rotationDegrees);
@@ -344,6 +354,28 @@ status_t convertMetaDataToMessage(
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-0", buffer);
+
+ if (!meta->findData(kKeyOpusCodecDelay, &type, &data, &size)) {
+ return -EINVAL;
+ }
+
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-1", buffer);
+
+ if (!meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size)) {
+ return -EINVAL;
+ }
+
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-2", buffer);
}
*format = msg;
@@ -546,6 +578,16 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
meta->setInt32(kKeyMaxInputSize, maxInputSize);
}
+ int32_t maxWidth;
+ if (msg->findInt32("max-width", &maxWidth)) {
+ meta->setInt32(kKeyMaxWidth, maxWidth);
+ }
+
+ int32_t maxHeight;
+ if (msg->findInt32("max-height", &maxHeight)) {
+ meta->setInt32(kKeyMaxHeight, maxHeight);
+ }
+
// reassemble the csd data into its original form
sp<ABuffer> csd0;
if (msg->findBuffer("csd-0", &csd0)) {
@@ -800,5 +842,36 @@ AString uriDebugString(const AString &uri, bool incognito) {
return AString("<no-scheme URI suppressed>");
}
+HLSTime::HLSTime(const sp<AMessage>& meta) :
+ mSeq(-1),
+ mTimeUs(-1ll),
+ mMeta(meta) {
+ if (meta != NULL) {
+ CHECK(meta->findInt32("discontinuitySeq", &mSeq));
+ CHECK(meta->findInt64("timeUs", &mTimeUs));
+ }
+}
+
+int64_t HLSTime::getSegmentTimeUs(bool midpoint) const {
+ int64_t segmentStartTimeUs = -1ll;
+ if (mMeta != NULL) {
+ CHECK(mMeta->findInt64("segmentStartTimeUs", &segmentStartTimeUs));
+ if (midpoint) {
+ int64_t durationUs;
+ CHECK(mMeta->findInt64("segmentDurationUs", &durationUs));
+ segmentStartTimeUs += durationUs / 2;
+ }
+ }
+ return segmentStartTimeUs;
+}
+
+bool operator <(const HLSTime &t0, const HLSTime &t1) {
+ // We can only compare the discontinuity sequence and the timestamp.
+ // (The segment start time is not reliable in the live streaming case:
+ // it is measured from the beginning of the playlist, but the playlist
+ // itself can change.)
+ return t0.mSeq < t1.mSeq
+ || (t0.mSeq == t1.mSeq && t0.mTimeUs < t1.mTimeUs);
+}
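+// Illustrative example: with t0 = {mSeq = 3, mTimeUs = 5000000} and
+// t1 = {mSeq = 4, mTimeUs = 1000000}, t0 < t1 holds because the discontinuity
+// sequence is compared first; timestamps only break ties within the same
+// sequence.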
+
} // namespace android
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 5ec3438..8ef2dca 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -26,6 +26,7 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
+#include <utils/misc.h>
namespace android {
@@ -186,17 +187,31 @@ void FindAVCDimensions(
if (aspect_ratio_idc == 255 /* extendedSAR */) {
sar_width = br.getBits(16);
sar_height = br.getBits(16);
- } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) {
- static const int32_t kFixedSARWidth[] = {
- 1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160
+ } else {
+ static const struct { unsigned width, height; } kFixedSARs[] = {
+ { 0, 0 }, // Invalid
+ { 1, 1 },
+ { 12, 11 },
+ { 10, 11 },
+ { 16, 11 },
+ { 40, 33 },
+ { 24, 11 },
+ { 20, 11 },
+ { 32, 11 },
+ { 80, 33 },
+ { 18, 11 },
+ { 15, 11 },
+ { 64, 33 },
+ { 160, 99 },
+ { 4, 3 },
+ { 3, 2 },
+ { 2, 1 },
};
- static const int32_t kFixedSARHeight[] = {
- 1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99
- };
-
- sar_width = kFixedSARWidth[aspect_ratio_idc - 1];
- sar_height = kFixedSARHeight[aspect_ratio_idc - 1];
+ if (aspect_ratio_idc > 0 && aspect_ratio_idc < NELEM(kFixedSARs)) {
+ sar_width = kFixedSARs[aspect_ratio_idc].width;
+ sar_height = kFixedSARs[aspect_ratio_idc].height;
+ }
}
}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 495bad0..10937ec 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -623,7 +623,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
} else {
int64_t currentTime = mBufferTimestamps.top();
currentTime += mStreamInfo->aacSamplesPerFrame *
- 1000000ll / mStreamInfo->sampleRate;
+ 1000000ll / mStreamInfo->aacSampleRate;
mBufferTimestamps.add(currentTime);
}
} else {
@@ -874,7 +874,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
// adjust/interpolate next time stamp
*currentBufLeft -= decodedSize;
*nextTimeStamp += mStreamInfo->aacSamplesPerFrame *
- 1000000ll / mStreamInfo->sampleRate;
+ 1000000ll / mStreamInfo->aacSampleRate;
ALOGV("adjusted nextTimeStamp/size to %lld/%d",
(long long) *nextTimeStamp, *currentBufLeft);
} else {
@@ -975,6 +975,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
mBufferSizes.clear();
mDecodedSizes.clear();
mLastInHeader = NULL;
+ mEndOfInput = false;
} else {
int avail;
while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) {
@@ -989,6 +990,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
mOutputBufferCount++;
}
mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
+ mEndOfOutput = false;
}
}
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 8a95643..a35909e 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -38,7 +38,10 @@ SoftVPX::SoftVPX(
NULL /* profileLevels */, 0 /* numProfileLevels */,
320 /* width */, 240 /* height */, callbacks, appData, component),
mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
+ mEOSStatus(INPUT_DATA_AVAILABLE),
mCtx(NULL),
+ mFrameParallelMode(false),
+ mTimeStampIdx(0),
mImg(NULL) {
// arbitrary from avc/hevc as vpx does not specify a min compression ratio
const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
@@ -51,9 +54,7 @@ SoftVPX::SoftVPX(
}
SoftVPX::~SoftVPX() {
- vpx_codec_destroy((vpx_codec_ctx_t *)mCtx);
- delete (vpx_codec_ctx_t *)mCtx;
- mCtx = NULL;
+ destroyDecoder();
}
static int GetCPUCoreCount() {
@@ -73,12 +74,19 @@ status_t SoftVPX::initDecoder() {
mCtx = new vpx_codec_ctx_t;
vpx_codec_err_t vpx_err;
vpx_codec_dec_cfg_t cfg;
+ vpx_codec_flags_t flags;
memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
+ memset(&flags, 0, sizeof(vpx_codec_flags_t));
cfg.threads = GetCPUCoreCount();
+
+ if (mFrameParallelMode) {
+ flags |= VPX_CODEC_USE_FRAME_THREADING;
+ }
+
if ((vpx_err = vpx_codec_dec_init(
(vpx_codec_ctx_t *)mCtx,
mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo,
- &cfg, 0))) {
+ &cfg, flags))) {
ALOGE("on2 decoder failed to initialize. (%d)", vpx_err);
return UNKNOWN_ERROR;
}
@@ -86,86 +94,155 @@ status_t SoftVPX::initDecoder() {
return OK;
}
+status_t SoftVPX::destroyDecoder() {
+ vpx_codec_destroy((vpx_codec_ctx_t *)mCtx);
+ delete (vpx_codec_ctx_t *)mCtx;
+ mCtx = NULL;
+ return OK;
+}
+
+bool SoftVPX::outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+ BufferInfo *outInfo = NULL;
+ OMX_BUFFERHEADERTYPE *outHeader = NULL;
+ vpx_codec_iter_t iter = NULL;
+
+ if (flushDecoder && mFrameParallelMode) {
+ // Flush decoder by passing NULL data ptr and 0 size.
+ // Ideally, this should never fail.
+ if (vpx_codec_decode((vpx_codec_ctx_t *)mCtx, NULL, 0, NULL, 0)) {
+ ALOGE("Failed to flush on2 decoder.");
+ return false;
+ }
+ }
+
+ if (!display) {
+ if (!flushDecoder) {
+ ALOGE("Invalid operation.");
+ return false;
+ }
+ // Drop all the decoded frames in decoder.
+ while ((mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter))) {
+ }
+ return true;
+ }
+
+ while (!outQueue.empty()) {
+ if (mImg == NULL) {
+ mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
+ if (mImg == NULL) {
+ break;
+ }
+ }
+ uint32_t width = mImg->d_w;
+ uint32_t height = mImg->d_h;
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ CHECK_EQ(mImg->fmt, VPX_IMG_FMT_I420);
+ handlePortSettingsChange(portWillReset, width, height);
+ if (*portWillReset) {
+ return true;
+ }
+
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = (width * height * 3) / 2;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv;
+
+ uint8_t *dst = outHeader->pBuffer;
+ const uint8_t *srcY = (const uint8_t *)mImg->planes[VPX_PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)mImg->planes[VPX_PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)mImg->planes[VPX_PLANE_V];
+ size_t srcYStride = mImg->stride[VPX_PLANE_Y];
+ size_t srcUStride = mImg->stride[VPX_PLANE_U];
+ size_t srcVStride = mImg->stride[VPX_PLANE_V];
+ copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
+
+ mImg = NULL;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+
+ if (!eos) {
+ return true;
+ }
+
+ if (!outQueue.empty()) {
+ outInfo = *outQueue.begin();
+ outQueue.erase(outQueue.begin());
+ outHeader = outInfo->mHeader;
+ outHeader->nTimeStamp = 0;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ mEOSStatus = OUTPUT_FRAMES_FLUSHED;
+ }
+ return true;
+}
+
void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
- if (mOutputPortSettingsChange != NONE) {
+ if (mOutputPortSettingsChange != NONE || mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
return;
}
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
bool EOSseen = false;
+ vpx_codec_err_t err;
+ bool portWillReset = false;
+
+ while ((mEOSStatus == INPUT_EOS_SEEN || !inQueue.empty())
+ && !outQueue.empty()) {
+ // Output the pending frames left over from the last port reset or decoder flush.
+ if (mEOSStatus == INPUT_EOS_SEEN || mImg != NULL) {
+ if (!outputBuffers(
+ mEOSStatus == INPUT_EOS_SEEN, true /* display */,
+ mEOSStatus == INPUT_EOS_SEEN, &portWillReset)) {
+ ALOGE("on2 decoder failed to output frame.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ if (portWillReset || mEOSStatus == OUTPUT_FRAMES_FLUSHED ||
+ mEOSStatus == INPUT_EOS_SEEN) {
+ return;
+ }
+ }
- while (!inQueue.empty() && !outQueue.empty()) {
BufferInfo *inInfo = *inQueue.begin();
OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp;
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ mEOSStatus = INPUT_EOS_SEEN;
EOSseen = true;
- if (inHeader->nFilledLen == 0) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
- return;
- }
}
- if (mImg == NULL) {
- if (vpx_codec_decode(
- (vpx_codec_ctx_t *)mCtx,
- inHeader->pBuffer + inHeader->nOffset,
- inHeader->nFilledLen,
- NULL,
- 0)) {
- ALOGE("on2 decoder failed to decode frame.");
-
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
- vpx_codec_iter_t iter = NULL;
- mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
+ if (inHeader->nFilledLen > 0 &&
+ vpx_codec_decode((vpx_codec_ctx_t *)mCtx,
+ inHeader->pBuffer + inHeader->nOffset,
+ inHeader->nFilledLen,
+ &mTimeStamps[mTimeStampIdx], 0)) {
+ ALOGE("on2 decoder failed to decode frame.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
}
+ mTimeStampIdx = (mTimeStampIdx + 1) % kNumBuffers;
- if (mImg != NULL) {
- CHECK_EQ(mImg->fmt, IMG_FMT_I420);
-
- uint32_t width = mImg->d_w;
- uint32_t height = mImg->d_h;
- bool portWillReset = false;
- handlePortSettingsChange(&portWillReset, width, height);
- if (portWillReset) {
- return;
- }
-
- outHeader->nOffset = 0;
- outHeader->nFilledLen = (width * height * 3) / 2;
- outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0;
- outHeader->nTimeStamp = inHeader->nTimeStamp;
-
- uint8_t *dst = outHeader->pBuffer;
- const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y];
- const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U];
- const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V];
- size_t srcYStride = mImg->stride[PLANE_Y];
- size_t srcUStride = mImg->stride[PLANE_U];
- size_t srcVStride = mImg->stride[PLANE_V];
- copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
-
- mImg = NULL;
- outInfo->mOwnedByUs = false;
- outQueue.erase(outQueue.begin());
- outInfo = NULL;
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
+ if (!outputBuffers(
+ EOSseen /* flushDecoder */, true /* display */, EOSseen, &portWillReset)) {
+ ALOGE("on2 decoder failed to output frame.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ if (portWillReset) {
+ return;
}
inInfo->mOwnedByUs = false;
@@ -176,6 +253,30 @@ void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
}
}
+void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == kInputPortIndex) {
+ bool portWillReset = false;
+ if (!outputBuffers(
+ true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) {
+ ALOGE("Failed to flush decoder.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ mEOSStatus = INPUT_DATA_AVAILABLE;
+ }
+}
+
+void SoftVPX::onReset() {
+ bool portWillReset = false;
+ if (!outputBuffers(
+ true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) {
+ ALOGW("Failed to flush decoder. Try to hard reset decoder");
+ destroyDecoder();
+ initDecoder();
+ }
+ mEOSStatus = INPUT_DATA_AVAILABLE;
+}
+
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index 8f68693..8ccbae2 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -38,6 +38,8 @@ protected:
virtual ~SoftVPX();
virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
private:
enum {
@@ -49,11 +51,21 @@ private:
MODE_VP9
} mMode;
- void *mCtx;
+ enum {
+ INPUT_DATA_AVAILABLE, // VPX component is ready to decode data.
+ INPUT_EOS_SEEN, // VPX component saw EOS and is flushing On2 decoder.
+ OUTPUT_FRAMES_FLUSHED // VPX component finished flushing On2 decoder.
+ } mEOSStatus;
+ void *mCtx;
+ bool mFrameParallelMode; // Frame-parallel mode is only supported by the VP9 decoder.
+ OMX_TICKS mTimeStamps[kNumBuffers];
+ uint8_t mTimeStampIdx;
vpx_image_t *mImg;
status_t initDecoder();
+ status_t destroyDecoder();
+ bool outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset);
DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);
};
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index b8084ae..6322dc2 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -345,9 +345,15 @@ void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
}
uint8_t channel_mapping[kMaxChannels] = {0};
- memcpy(&channel_mapping,
- kDefaultOpusChannelLayout,
- kMaxChannelsWithDefaultLayout);
+ if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+ } else {
+ memcpy(&channel_mapping,
+ mHeader->stream_map,
+ mHeader->channels);
+ }
int status = OPUS_INVALID_STATE;
mDecoder = opus_multistream_decoder_create(kRate,
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 4e75250..21da707 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -98,33 +98,49 @@ void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) {
mCropWidth = mCropRight - mCropLeft + 1;
mCropHeight = mCropBottom - mCropTop + 1;
- int halFormat;
- size_t bufWidth, bufHeight;
-
- switch (mColorFormat) {
- case OMX_COLOR_FormatYUV420Planar:
- case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
- case OMX_COLOR_FormatYUV420SemiPlanar:
- {
- if (!runningInEmulator()) {
+ // by default convert everything to RGB565
+ int halFormat = HAL_PIXEL_FORMAT_RGB_565;
+ size_t bufWidth = mCropWidth;
+ size_t bufHeight = mCropHeight;
+
+ // hardware has YV12 and RGBA8888 support, so convert known formats
+ if (!runningInEmulator()) {
+ switch (mColorFormat) {
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ {
halFormat = HAL_PIXEL_FORMAT_YV12;
bufWidth = (mCropWidth + 1) & ~1;
bufHeight = (mCropHeight + 1) & ~1;
break;
}
-
- // fall through.
+ case OMX_COLOR_Format24bitRGB888:
+ {
+ halFormat = HAL_PIXEL_FORMAT_RGB_888;
+ bufWidth = (mCropWidth + 1) & ~1;
+ bufHeight = (mCropHeight + 1) & ~1;
+ break;
+ }
+ case OMX_COLOR_Format32bitARGB8888:
+ case OMX_COLOR_Format32BitRGBA8888:
+ {
+ halFormat = HAL_PIXEL_FORMAT_RGBA_8888;
+ bufWidth = (mCropWidth + 1) & ~1;
+ bufHeight = (mCropHeight + 1) & ~1;
+ break;
+ }
+ default:
+ {
+ break;
+ }
}
+ }
- default:
- halFormat = HAL_PIXEL_FORMAT_RGB_565;
- bufWidth = mCropWidth;
- bufHeight = mCropHeight;
-
- mConverter = new ColorConverter(
- mColorFormat, OMX_COLOR_Format16bitRGB565);
- CHECK(mConverter->isValid());
- break;
+ if (halFormat == HAL_PIXEL_FORMAT_RGB_565) {
+ mConverter = new ColorConverter(
+ mColorFormat, OMX_COLOR_Format16bitRGB565);
+ CHECK(mConverter->isValid());
}
CHECK(mNativeWindow != NULL);
@@ -201,6 +217,8 @@ void SoftwareRenderer::render(
CHECK_EQ(0, mapper.lock(
buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+ // TODO move the other conversions also into ColorConverter, and
+ // fix cropping issues (when mCropLeft/Top != 0 or mWidth != mCropWidth)
if (mConverter) {
mConverter->convert(
data,
@@ -211,7 +229,8 @@ void SoftwareRenderer::render(
0, 0, mCropWidth - 1, mCropHeight - 1);
} else if (mColorFormat == OMX_COLOR_FormatYUV420Planar) {
const uint8_t *src_y = (const uint8_t *)data;
- const uint8_t *src_u = (const uint8_t *)data + mWidth * mHeight;
+ const uint8_t *src_u =
+ (const uint8_t *)data + mWidth * mHeight;
const uint8_t *src_v = src_u + (mWidth / 2 * mHeight / 2);
uint8_t *dst_y = (uint8_t *)dst;
@@ -239,11 +258,9 @@ void SoftwareRenderer::render(
}
} else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
|| mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
- const uint8_t *src_y =
- (const uint8_t *)data;
-
- const uint8_t *src_uv =
- (const uint8_t *)data + mWidth * (mHeight - mCropTop / 2);
+ const uint8_t *src_y = (const uint8_t *)data;
+ const uint8_t *src_uv = (const uint8_t *)data
+ + mWidth * (mHeight - mCropTop / 2);
uint8_t *dst_y = (uint8_t *)dst;
@@ -271,6 +288,38 @@ void SoftwareRenderer::render(
dst_u += dst_c_stride;
dst_v += dst_c_stride;
}
+ } else if (mColorFormat == OMX_COLOR_Format24bitRGB888) {
+ uint8_t* srcPtr = (uint8_t*)data;
+ uint8_t* dstPtr = (uint8_t*)dst;
+
+ for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
+ memcpy(dstPtr, srcPtr, mCropWidth * 3);
+ srcPtr += mWidth * 3;
+ dstPtr += buf->stride * 3;
+ }
+ } else if (mColorFormat == OMX_COLOR_Format32bitARGB8888) {
+ uint8_t *srcPtr, *dstPtr;
+
+ for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
+ srcPtr = (uint8_t*)data + mWidth * 4 * y;
+ dstPtr = (uint8_t*)dst + buf->stride * 4 * y;
+ for (size_t x = 0; x < (size_t)mCropWidth; ++x) {
+ uint8_t a = *srcPtr++;
+ for (size_t i = 0; i < 3; ++i) { // copy RGB
+ *dstPtr++ = *srcPtr++;
+ }
+ *dstPtr++ = a; // alpha last (ARGB to RGBA)
+ }
+ }
+ } else if (mColorFormat == OMX_COLOR_Format32BitRGBA8888) {
+ uint8_t* srcPtr = (uint8_t*)data;
+ uint8_t* dstPtr = (uint8_t*)dst;
+
+ for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
+ memcpy(dstPtr, srcPtr, mCropWidth * 4);
+ srcPtr += mWidth * 4;
+ dstPtr += buf->stride * 4;
+ }
} else {
LOG_ALWAYS_FATAL("bad color format %#x", mColorFormat);
}
diff --git a/media/libstagefright/filters/Android.mk b/media/libstagefright/filters/Android.mk
new file mode 100644
index 0000000..36ab444
--- /dev/null
+++ b/media/libstagefright/filters/Android.mk
@@ -0,0 +1,27 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ ColorConvert.cpp \
+ GraphicBufferListener.cpp \
+ IntrinsicBlurFilter.cpp \
+ MediaFilter.cpp \
+ RSFilter.cpp \
+ SaturationFilter.cpp \
+ saturationARGB.rs \
+ SimpleFilter.cpp \
+ ZeroFilter.cpp
+
+LOCAL_C_INCLUDES := \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/rs/cpp \
+ $(TOP)/frameworks/rs \
+
+intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
+LOCAL_C_INCLUDES += $(intermediates)
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE:= libstagefright_mediafilter
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/filters/ColorConvert.cpp b/media/libstagefright/filters/ColorConvert.cpp
new file mode 100644
index 0000000..a5039f9
--- /dev/null
+++ b/media/libstagefright/filters/ColorConvert.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ColorConvert.h"
+
+#ifndef max
+#define max(a,b) ((a) > (b) ? (a) : (b))
+#endif
+#ifndef min
+#define min(a,b) ((a) < (b) ? (a) : (b))
+#endif
+
+namespace android {
+
+void YUVToRGB(
+ int32_t y, int32_t u, int32_t v,
+ int32_t* r, int32_t* g, int32_t* b) {
+ y -= 16;
+ u -= 128;
+ v -= 128;
+
+ *b = 1192 * y + 2066 * u;
+ *g = 1192 * y - 833 * v - 400 * u;
+ *r = 1192 * y + 1634 * v;
+
+ *r = min(262143, max(0, *r));
+ *g = min(262143, max(0, *g));
+ *b = min(262143, max(0, *b));
+
+ *r >>= 10;
+ *g >>= 10;
+ *b >>= 10;
+}
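+// Worked example (illustrative): for nominal white y = 235 with neutral chroma
+// u = v = 128, the biased values are y = 219, u = v = 0, so
+// r = g = b = 1192 * 219 = 261048, which stays below the 262143 clamp and
+// yields 254 (~255) after the final 10-bit shift.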
+
+void convertYUV420spToARGB(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest) {
+ const int32_t bytes_per_pixel = 2;
+
+ for (int32_t i = 0; i < height; i++) {
+ for (int32_t j = 0; j < width; j++) {
+ int32_t y = *(pY + i * width + j);
+ int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
+ int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
+
+ int32_t r, g, b;
+ YUVToRGB(y, u, v, &r, &g, &b);
+
+ *dest++ = 0xFF;
+ *dest++ = r;
+ *dest++ = g;
+ *dest++ = b;
+ }
+ }
+}
+
+void convertYUV420spToRGB888(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest) {
+ const int32_t bytes_per_pixel = 2;
+
+ for (int32_t i = 0; i < height; i++) {
+ for (int32_t j = 0; j < width; j++) {
+ int32_t y = *(pY + i * width + j);
+ int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
+ int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
+
+ int32_t r, g, b;
+ YUVToRGB(y, u, v, &r, &g, &b);
+
+ *dest++ = r;
+ *dest++ = g;
+ *dest++ = b;
+ }
+ }
+}
+
+// HACK - not even slightly optimized
+// TODO: remove when RGBA support is added to SoftwareRenderer
+void convertRGBAToARGB(
+ uint8_t *src, int32_t width, int32_t height, uint32_t stride,
+ uint8_t *dest) {
+ for (size_t i = 0; i < height; ++i) {
+ for (size_t j = 0; j < width; ++j) {
+ uint8_t r = *src++;
+ uint8_t g = *src++;
+ uint8_t b = *src++;
+ uint8_t a = *src++;
+ *dest++ = a;
+ *dest++ = r;
+ *dest++ = g;
+ *dest++ = b;
+ }
+ src += (stride - width) * 4;
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/ColorConvert.h b/media/libstagefright/filters/ColorConvert.h
new file mode 100644
index 0000000..13faa02
--- /dev/null
+++ b/media/libstagefright/filters/ColorConvert.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef COLOR_CONVERT_H_
+#define COLOR_CONVERT_H_
+
+#include <inttypes.h>
+
+namespace android {
+
+void YUVToRGB(
+ int32_t y, int32_t u, int32_t v,
+ int32_t* r, int32_t* g, int32_t* b);
+
+void convertYUV420spToARGB(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest);
+
+void convertYUV420spToRGB888(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest);
+
+// TODO: remove when RGBA support is added to SoftwareRenderer
+void convertRGBAToARGB(
+ uint8_t *src, int32_t width, int32_t height, uint32_t stride,
+ uint8_t *dest);
+
+} // namespace android
+
+#endif // COLOR_CONVERT_H_
diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp
new file mode 100644
index 0000000..66374ba
--- /dev/null
+++ b/media/libstagefright/filters/GraphicBufferListener.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GraphicBufferListener"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <gui/BufferItem.h>
+
+#include "GraphicBufferListener.h"
+
+namespace android {
+
+status_t GraphicBufferListener::init(
+ const sp<AMessage> &notify,
+ size_t bufferWidth, size_t bufferHeight, size_t bufferCount) {
+ mNotify = notify;
+
+ String8 name("GraphicBufferListener");
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setConsumerName(name);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_SW_READ_OFTEN);
+
+ status_t err = mConsumer->setMaxAcquiredBufferCount(bufferCount);
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
+ bufferCount, err);
+ return err;
+ }
+
+ wp<BufferQueue::ConsumerListener> listener =
+ static_cast<BufferQueue::ConsumerListener*>(this);
+ sp<BufferQueue::ProxyConsumerListener> proxy =
+ new BufferQueue::ProxyConsumerListener(listener);
+
+ err = mConsumer->consumerConnect(proxy, false);
+ if (err != NO_ERROR) {
+ ALOGE("Error connecting to BufferQueue: %s (%d)",
+ strerror(-err), err);
+ return err;
+ }
+
+ ALOGV("init() successful.");
+
+ return OK;
+}
+
+void GraphicBufferListener::onFrameAvailable(const BufferItem& /* item */) {
+ ALOGV("onFrameAvailable() called");
+
+ {
+ Mutex::Autolock autoLock(mMutex);
+ mNumFramesAvailable++;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setWhat(kWhatFrameAvailable);
+ notify->post();
+}
+
+void GraphicBufferListener::onBuffersReleased() {
+ ALOGV("onBuffersReleased() called");
+ // nothing to do
+}
+
+void GraphicBufferListener::onSidebandStreamChanged() {
+ ALOGW("GraphicBufferListener cannot consume sideband streams.");
+ // nothing to do
+}
+
+BufferItem GraphicBufferListener::getBufferItem() {
+ BufferItem item;
+
+ {
+ Mutex::Autolock autoLock(mMutex);
+ if (mNumFramesAvailable <= 0) {
+ ALOGE("getBuffer() called with no frames available");
+ return item;
+ }
+ mNumFramesAvailable--;
+ }
+
+ status_t err = mConsumer->acquireBuffer(&item, 0);
+ if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
+ // shouldn't happen, since we track num frames available
+ ALOGE("frame was not available");
+ item.mBuf = -1;
+ return item;
+ } else if (err != OK) {
+ ALOGE("acquireBuffer returned err=%d", err);
+ item.mBuf = -1;
+ return item;
+ }
+
+ // Wait for it to become available.
+ err = item.mFence->waitForever("GraphicBufferListener::getBufferItem");
+ if (err != OK) {
+ ALOGW("failed to wait for buffer fence: %d", err);
+ // keep going
+ }
+
+ // If this is the first time we're seeing this buffer, add it to our
+ // slot table.
+ if (item.mGraphicBuffer != NULL) {
+ ALOGV("setting mBufferSlot %d", item.mBuf);
+ mBufferSlot[item.mBuf] = item.mGraphicBuffer;
+ }
+
+ return item;
+}
+
+sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) {
+ sp<GraphicBuffer> buf;
+ if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) {
+ ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf);
+ return buf;
+ }
+
+ buf = mBufferSlot[item.mBuf];
+ CHECK(buf.get() != NULL);
+
+ return buf;
+}
+
+status_t GraphicBufferListener::releaseBuffer(BufferItem item) {
+ if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) {
+ ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf);
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+
+ return OK;
+}
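+// Typical consumer-side flow (illustrative sketch, assuming init() succeeded
+// and a kWhatFrameAvailable notification was received):
+//   BufferItem item = listener->getBufferItem();
+//   if (item.mBuf >= 0) {
+//       sp<GraphicBuffer> buffer = listener->getBuffer(item);
+//       // ... read the frame contents ...
+//       listener->releaseBuffer(item);
+//   }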
+
+} // namespace android
diff --git a/media/libstagefright/filters/GraphicBufferListener.h b/media/libstagefright/filters/GraphicBufferListener.h
new file mode 100644
index 0000000..586bf65
--- /dev/null
+++ b/media/libstagefright/filters/GraphicBufferListener.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GRAPHIC_BUFFER_LISTENER_H_
+#define GRAPHIC_BUFFER_LISTENER_H_
+
+#include <gui/BufferQueue.h>
+
+namespace android {
+
+struct AMessage;
+
+struct GraphicBufferListener : public BufferQueue::ConsumerListener {
+public:
+ GraphicBufferListener() {};
+
+ status_t init(
+ const sp<AMessage> &notify,
+ size_t bufferWidth, size_t bufferHeight, size_t bufferCount);
+
+ virtual void onFrameAvailable(const BufferItem& item);
+ virtual void onBuffersReleased();
+ virtual void onSidebandStreamChanged();
+
+ // Returns the handle to the producer side of the BufferQueue. Buffers
+ // queued on this will be received by GraphicBufferListener.
+ sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
+ return mProducer;
+ }
+
+ BufferItem getBufferItem();
+ sp<GraphicBuffer> getBuffer(BufferItem item);
+ status_t releaseBuffer(BufferItem item);
+
+ enum {
+ kWhatFrameAvailable = 'frav',
+ };
+
+private:
+ sp<AMessage> mNotify;
+ size_t mNumFramesAvailable;
+
+ mutable Mutex mMutex;
+
+ // Our BufferQueue interfaces. mProducer is passed to the producer through
+ // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
+ // the buffers queued by the producer.
+ sp<IGraphicBufferProducer> mProducer;
+ sp<IGraphicBufferConsumer> mConsumer;
+
+ // Cache of GraphicBuffers from the buffer queue.
+ sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS];
+};
+
+} // namespace android
+
+#endif  // GRAPHIC_BUFFER_LISTENER_H_
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
new file mode 100644
index 0000000..cbcf699
--- /dev/null
+++ b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IntrinsicBlurFilter"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "IntrinsicBlurFilter.h"
+
+namespace android {
+
+status_t IntrinsicBlurFilter::configure(const sp<AMessage> &msg) {
+ status_t err = SimpleFilter::configure(msg);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findString("cacheDir", &mCacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ return OK;
+}
+
+status_t IntrinsicBlurFilter::start() {
+ // TODO: use a single RS context object for entire application
+ mRS = new RSC::RS();
+
+ if (!mRS->init(mCacheDir.c_str())) {
+ ALOGE("Failed to initialize RenderScript context.");
+ return NO_INIT;
+ }
+
+ // 32-bit elements for ARGB8888
+ RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
+
+ RSC::Type::Builder tb(mRS, e);
+ tb.setX(mWidth);
+ tb.setY(mHeight);
+ RSC::sp<const RSC::Type> t = tb.create();
+
+ mAllocIn = RSC::Allocation::createTyped(mRS, t);
+ mAllocOut = RSC::Allocation::createTyped(mRS, t);
+
+ mBlur = RSC::ScriptIntrinsicBlur::create(mRS, e);
+ mBlur->setRadius(mBlurRadius);
+ mBlur->setInput(mAllocIn);
+
+ return OK;
+}
+
+void IntrinsicBlurFilter::reset() {
+ mBlur.clear();
+ mAllocOut.clear();
+ mAllocIn.clear();
+ mRS.clear();
+}
+
+status_t IntrinsicBlurFilter::setParameters(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ float blurRadius;
+ if (params->findFloat("blur-radius", &blurRadius)) {
+ mBlurRadius = blurRadius;
+ }
+
+ return OK;
+}
+
+status_t IntrinsicBlurFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
+ mBlur->forEach(mAllocOut);
+ mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
+
+ return OK;
+}
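+// Sizing note (illustrative): the allocations are typed as U8_4, i.e. one
+// 4-byte element per ARGB8888 pixel, with dimensions mWidth x mHeight, so the
+// copy1DRangeFrom/copy1DRangeTo calls above move mWidth * mHeight elements
+// (mWidth * mHeight * 4 bytes) per frame; srcBuffer and outBuffer are assumed
+// to be at least that large.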
+
+} // namespace android
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/IntrinsicBlurFilter.h
new file mode 100644
index 0000000..4707ab7
--- /dev/null
+++ b/media/libstagefright/filters/IntrinsicBlurFilter.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef INTRINSIC_BLUR_FILTER_H_
+#define INTRINSIC_BLUR_FILTER_H_
+
+#include "RenderScript.h"
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct IntrinsicBlurFilter : public SimpleFilter {
+public:
+ IntrinsicBlurFilter() : mBlurRadius(1.f) {};
+
+ virtual status_t configure(const sp<AMessage> &msg);
+ virtual status_t start();
+ virtual void reset();
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~IntrinsicBlurFilter() {};
+
+private:
+ AString mCacheDir;
+ RSC::sp<RSC::RS> mRS;
+ RSC::sp<RSC::Allocation> mAllocIn;
+ RSC::sp<RSC::Allocation> mAllocOut;
+ RSC::sp<RSC::ScriptIntrinsicBlur> mBlur;
+ float mBlurRadius;
+};
+
+} // namespace android
+
+#endif // INTRINSIC_BLUR_FILTER_H_
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
new file mode 100644
index 0000000..0a09575
--- /dev/null
+++ b/media/libstagefright/filters/MediaFilter.cpp
@@ -0,0 +1,818 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaFilter"
+
+#include <inttypes.h>
+#include <utils/Trace.h>
+
+#include <binder/MemoryDealer.h>
+
+#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaFilter.h>
+
+#include <gui/BufferItem.h>
+
+#include "ColorConvert.h"
+#include "GraphicBufferListener.h"
+#include "IntrinsicBlurFilter.h"
+#include "RSFilter.h"
+#include "SaturationFilter.h"
+#include "ZeroFilter.h"
+
+namespace android {
+
+// parameter: number of input and output buffers
+static const size_t kBufferCountActual = 4;
+
+MediaFilter::MediaFilter()
+ : mState(UNINITIALIZED),
+ mGeneration(0),
+ mGraphicBufferListener(NULL) {
+}
+
+MediaFilter::~MediaFilter() {
+}
+
+//////////////////// PUBLIC FUNCTIONS //////////////////////////////////////////
+
+void MediaFilter::setNotificationMessage(const sp<AMessage> &msg) {
+ mNotify = msg;
+}
+
+void MediaFilter::initiateAllocateComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatAllocateComponent);
+ msg->setTarget(this);
+ msg->post();
+}
+
+void MediaFilter::initiateConfigureComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatConfigureComponent);
+ msg->setTarget(this);
+ msg->post();
+}
+
+void MediaFilter::initiateCreateInputSurface() {
+ (new AMessage(kWhatCreateInputSurface, this))->post();
+}
+
+void MediaFilter::initiateStart() {
+ (new AMessage(kWhatStart, this))->post();
+}
+
+void MediaFilter::initiateShutdown(bool keepComponentAllocated) {
+ sp<AMessage> msg = new AMessage(kWhatShutdown, this);
+ msg->setInt32("keepComponentAllocated", keepComponentAllocated);
+ msg->post();
+}
+
+void MediaFilter::signalFlush() {
+ (new AMessage(kWhatFlush, this))->post();
+}
+
+void MediaFilter::signalResume() {
+ (new AMessage(kWhatResume, this))->post();
+}
+
+// nothing to do
+void MediaFilter::signalRequestIDRFrame() {
+ return;
+}
+
+void MediaFilter::signalSetParameters(const sp<AMessage> &params) {
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
+ msg->setMessage("params", params);
+ msg->post();
+}
+
+void MediaFilter::signalEndOfInputStream() {
+ (new AMessage(kWhatSignalEndOfInputStream, this))->post();
+}
+
+void MediaFilter::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatAllocateComponent:
+ {
+ onAllocateComponent(msg);
+ break;
+ }
+ case kWhatConfigureComponent:
+ {
+ onConfigureComponent(msg);
+ break;
+ }
+ case kWhatStart:
+ {
+ onStart();
+ break;
+ }
+ case kWhatProcessBuffers:
+ {
+ processBuffers();
+ break;
+ }
+ case kWhatInputBufferFilled:
+ {
+ onInputBufferFilled(msg);
+ break;
+ }
+ case kWhatOutputBufferDrained:
+ {
+ onOutputBufferDrained(msg);
+ break;
+ }
+ case kWhatShutdown:
+ {
+ onShutdown(msg);
+ break;
+ }
+ case kWhatFlush:
+ {
+ onFlush();
+ break;
+ }
+ case kWhatResume:
+ {
+ // nothing to do
+ break;
+ }
+ case kWhatSetParameters:
+ {
+ onSetParameters(msg);
+ break;
+ }
+ case kWhatCreateInputSurface:
+ {
+ onCreateInputSurface();
+ break;
+ }
+ case GraphicBufferListener::kWhatFrameAvailable:
+ {
+ onInputFrameAvailable();
+ break;
+ }
+ case kWhatSignalEndOfInputStream:
+ {
+ onSignalEndOfInputStream();
+ break;
+ }
+ default:
+ {
+ ALOGE("Message not handled:\n%s", msg->debugString().c_str());
+ break;
+ }
+ }
+}
+
+//////////////////// PORT DESCRIPTION //////////////////////////////////////////
+
+MediaFilter::PortDescription::PortDescription() {
+}
+
+void MediaFilter::PortDescription::addBuffer(
+ IOMX::buffer_id id, const sp<ABuffer> &buffer) {
+ mBufferIDs.push_back(id);
+ mBuffers.push_back(buffer);
+}
+
+size_t MediaFilter::PortDescription::countBuffers() {
+ return mBufferIDs.size();
+}
+
+IOMX::buffer_id MediaFilter::PortDescription::bufferIDAt(size_t index) const {
+ return mBufferIDs.itemAt(index);
+}
+
+sp<ABuffer> MediaFilter::PortDescription::bufferAt(size_t index) const {
+ return mBuffers.itemAt(index);
+}
+
+//////////////////// HELPER FUNCTIONS //////////////////////////////////////////
+
+void MediaFilter::signalProcessBuffers() {
+ (new AMessage(kWhatProcessBuffers, this))->post();
+}
+
+void MediaFilter::signalError(status_t error) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatError);
+ notify->setInt32("err", error);
+ notify->post();
+}
+
+status_t MediaFilter::allocateBuffersOnPort(OMX_U32 portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+ const bool isInput = portIndex == kPortIndexInput;
+ const size_t bufferSize = isInput ? mMaxInputSize : mMaxOutputSize;
+
+ CHECK(mDealer[portIndex] == NULL);
+ CHECK(mBuffers[portIndex].isEmpty());
+
+ ALOGV("Allocating %zu buffers of size %zu on %s port",
+ kBufferCountActual, bufferSize,
+ isInput ? "input" : "output");
+
+ size_t totalSize = kBufferCountActual * bufferSize;
+
+ mDealer[portIndex] = new MemoryDealer(totalSize, "MediaFilter");
+
+ for (size_t i = 0; i < kBufferCountActual; ++i) {
+ sp<IMemory> mem = mDealer[portIndex]->allocate(bufferSize);
+ CHECK(mem.get() != NULL);
+
+ BufferInfo info;
+ info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mBufferID = i;
+ info.mGeneration = mGeneration;
+ info.mOutputFlags = 0;
+ info.mData = new ABuffer(mem->pointer(), bufferSize);
+ info.mData->meta()->setInt64("timeUs", 0);
+
+ mBuffers[portIndex].push_back(info);
+
+ if (!isInput) {
+ mAvailableOutputBuffers.push(
+ &mBuffers[portIndex].editItemAt(i));
+ }
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatBuffersAllocated);
+
+ notify->setInt32("portIndex", portIndex);
+
+ sp<PortDescription> desc = new PortDescription;
+
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ const BufferInfo &info = mBuffers[portIndex][i];
+
+ desc->addBuffer(info.mBufferID, info.mData);
+ }
+
+ notify->setObject("portDesc", desc);
+ notify->post();
+
+ return OK;
+}
+
+MediaFilter::BufferInfo* MediaFilter::findBufferByID(
+ uint32_t portIndex, IOMX::buffer_id bufferID,
+ ssize_t *index) {
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+ if (info->mBufferID == bufferID) {
+ if (index != NULL) {
+ *index = i;
+ }
+ return info;
+ }
+ }
+
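+ // getting here means the caller passed a buffer ID that was never handed out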
+ TRESPASS();
+
+ return NULL;
+}
+
+void MediaFilter::postFillThisBuffer(BufferInfo *info) {
+ ALOGV("postFillThisBuffer on buffer %d", info->mBufferID);
+ if (mPortEOS[kPortIndexInput]) {
+ return;
+ }
+
+ CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+ info->mGeneration = mGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
+
+ info->mData->meta()->clear();
+ notify->setBuffer("buffer", info->mData);
+
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, this);
+ reply->setInt32("buffer-id", info->mBufferID);
+
+ notify->setMessage("reply", reply);
+
+ info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+ notify->post();
+}
+
+void MediaFilter::postDrainThisBuffer(BufferInfo *info) {
+ CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+ info->mGeneration = mGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
+ notify->setInt32("flags", info->mOutputFlags);
+ notify->setBuffer("buffer", info->mData);
+
+ sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, this);
+ reply->setInt32("buffer-id", info->mBufferID);
+
+ notify->setMessage("reply", reply);
+
+ notify->post();
+
+ info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+}
+
+void MediaFilter::postEOS() {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatEOS);
+ notify->setInt32("err", ERROR_END_OF_STREAM);
+ notify->post();
+
+ ALOGV("Sent kWhatEOS.");
+}
+
+void MediaFilter::sendFormatChange() {
+ sp<AMessage> notify = mNotify->dup();
+
+ notify->setInt32("what", kWhatOutputFormatChanged);
+
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+ notify->setString("mime", mime.c_str());
+
+ notify->setInt32("stride", mStride);
+ notify->setInt32("slice-height", mSliceHeight);
+ notify->setInt32("color-format", mColorFormatOut);
+ notify->setRect("crop", 0, 0, mStride - 1, mSliceHeight - 1);
+ notify->setInt32("width", mWidth);
+ notify->setInt32("height", mHeight);
+
+ notify->post();
+}
+
+void MediaFilter::requestFillEmptyInput() {
+ if (mPortEOS[kPortIndexInput]) {
+ return;
+ }
+
+ for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
+ BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
+
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ postFillThisBuffer(info);
+ }
+ }
+}
+
+void MediaFilter::processBuffers() {
+ if (mAvailableInputBuffers.empty() || mAvailableOutputBuffers.empty()) {
+ ALOGV("Skipping process (buffers unavailable)");
+ return;
+ }
+
+ if (mPortEOS[kPortIndexOutput]) {
+ // TODO notify caller of queueInput error when it is supported
+ // in MediaCodec
+ ALOGW("Tried to process a buffer after EOS.");
+ return;
+ }
+
+ BufferInfo *inputInfo = mAvailableInputBuffers[0];
+ mAvailableInputBuffers.removeAt(0);
+ BufferInfo *outputInfo = mAvailableOutputBuffers[0];
+ mAvailableOutputBuffers.removeAt(0);
+
+ status_t err;
+ err = mFilter->processBuffers(inputInfo->mData, outputInfo->mData);
+ if (err != (status_t)OK) {
+ outputInfo->mData->meta()->setInt32("err", err);
+ }
+
+ int64_t timeUs;
+ CHECK(inputInfo->mData->meta()->findInt64("timeUs", &timeUs));
+ outputInfo->mData->meta()->setInt64("timeUs", timeUs);
+ outputInfo->mOutputFlags = 0;
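+ // propagate an EOS marker from the input buffer to the output buffer and notify the client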
+ int32_t eos = 0;
+ if (inputInfo->mData->meta()->findInt32("eos", &eos) && eos != 0) {
+ outputInfo->mOutputFlags |= OMX_BUFFERFLAG_EOS;
+ mPortEOS[kPortIndexOutput] = true;
+ outputInfo->mData->meta()->setInt32("eos", eos);
+ postEOS();
+ ALOGV("Output stream saw EOS.");
+ }
+
+ ALOGV("Processed input buffer %u [%zu], output buffer %u [%zu]",
+ inputInfo->mBufferID, inputInfo->mData->size(),
+ outputInfo->mBufferID, outputInfo->mData->size());
+
+ if (mGraphicBufferListener != NULL) {
+ delete inputInfo;
+ } else {
+ postFillThisBuffer(inputInfo);
+ }
+ postDrainThisBuffer(outputInfo);
+
+ // prevent any corner case where buffers could get stuck in queue
+ signalProcessBuffers();
+}
+
+void MediaFilter::onAllocateComponent(const sp<AMessage> &msg) {
+ CHECK_EQ(mState, UNINITIALIZED);
+
+ CHECK(msg->findString("componentName", &mComponentName));
+ const char* name = mComponentName.c_str();
+ if (!strcasecmp(name, "android.filter.zerofilter")) {
+ mFilter = new ZeroFilter;
+ } else if (!strcasecmp(name, "android.filter.saturation")) {
+ mFilter = new SaturationFilter;
+ } else if (!strcasecmp(name, "android.filter.intrinsicblur")) {
+ mFilter = new IntrinsicBlurFilter;
+ } else if (!strcasecmp(name, "android.filter.RenderScript")) {
+ mFilter = new RSFilter;
+ } else {
+ ALOGE("Unrecognized filter name: %s", name);
+ signalError(NAME_NOT_FOUND);
+ return;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatComponentAllocated);
+ // HACK - need "OMX.google" to use MediaCodec's software renderer
+ notify->setString("componentName", "OMX.google.MediaFilter");
+ notify->post();
+ mState = INITIALIZED;
+ ALOGV("Handled kWhatAllocateComponent.");
+}
+
+void MediaFilter::onConfigureComponent(const sp<AMessage> &msg) {
+ // TODO: generalize to allow audio filters as well as video
+
+ CHECK_EQ(mState, INITIALIZED);
+
+ // get params - at least mime, width & height
+ AString mime;
+ CHECK(msg->findString("mime", &mime));
+ if (strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) {
+ ALOGE("Bad mime: %s", mime.c_str());
+ signalError(BAD_VALUE);
+ return;
+ }
+
+ CHECK(msg->findInt32("width", &mWidth));
+ CHECK(msg->findInt32("height", &mHeight));
+ if (!msg->findInt32("stride", &mStride)) {
+ mStride = mWidth;
+ }
+ if (!msg->findInt32("slice-height", &mSliceHeight)) {
+ mSliceHeight = mHeight;
+ }
+
+ mMaxInputSize = mWidth * mHeight * 4; // room for ARGB8888
+ int32_t maxInputSize;
+ if (msg->findInt32("max-input-size", &maxInputSize)
+ && (size_t)maxInputSize > mMaxInputSize) {
+ mMaxInputSize = maxInputSize;
+ }
+
+ if (!msg->findInt32("color-format", &mColorFormatIn)) {
+ // default to OMX_COLOR_Format32bitARGB8888
+ mColorFormatIn = OMX_COLOR_Format32bitARGB8888;
+ msg->setInt32("color-format", mColorFormatIn);
+ }
+ mColorFormatOut = mColorFormatIn;
+
+ mMaxOutputSize = mWidth * mHeight * 4; // room for ARGB8888
+
+ AString cacheDir;
+ if (!msg->findString("cacheDir", &cacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ signalError(NAME_NOT_FOUND);
+ return;
+ }
+
+ status_t err;
+ err = mFilter->configure(msg);
+ if (err != (status_t)OK) {
+ ALOGE("Failed to configure filter component, err %d", err);
+ signalError(err);
+ return;
+ }
+
+ mInputFormat = new AMessage();
+ mInputFormat->setString("mime", mime.c_str());
+ mInputFormat->setInt32("stride", mStride);
+ mInputFormat->setInt32("slice-height", mSliceHeight);
+ mInputFormat->setInt32("color-format", mColorFormatIn);
+ mInputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
+ mInputFormat->setInt32("width", mWidth);
+ mInputFormat->setInt32("height", mHeight);
+
+ mOutputFormat = new AMessage();
+ mOutputFormat->setString("mime", mime.c_str());
+ mOutputFormat->setInt32("stride", mStride);
+ mOutputFormat->setInt32("slice-height", mSliceHeight);
+ mOutputFormat->setInt32("color-format", mColorFormatOut);
+ mOutputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
+ mOutputFormat->setInt32("width", mWidth);
+ mOutputFormat->setInt32("height", mHeight);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatComponentConfigured);
+ notify->setString("componentName", "MediaFilter");
+ notify->setMessage("input-format", mInputFormat);
+ notify->setMessage("output-format", mOutputFormat);
+ notify->post();
+ mState = CONFIGURED;
+ ALOGV("Handled kWhatConfigureComponent.");
+
+ sendFormatChange();
+}
+
+void MediaFilter::onStart() {
+ CHECK_EQ(mState, CONFIGURED);
+
+ allocateBuffersOnPort(kPortIndexInput);
+
+ allocateBuffersOnPort(kPortIndexOutput);
+
+ status_t err = mFilter->start();
+ if (err != (status_t)OK) {
+ ALOGE("Failed to start filter component, err %d", err);
+ signalError(err);
+ return;
+ }
+
+ mPortEOS[kPortIndexInput] = false;
+ mPortEOS[kPortIndexOutput] = false;
+ mInputEOSResult = OK;
+ mState = STARTED;
+
+ requestFillEmptyInput();
+ ALOGV("Handled kWhatStart.");
+}
+
+void MediaFilter::onInputBufferFilled(const sp<AMessage> &msg) {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ BufferInfo *info = findBufferByID(kPortIndexInput, bufferID);
+
+ if (mState != STARTED) {
+ // we're not running, so we'll just keep that buffer...
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ return;
+ }
+
+ if (info->mGeneration != mGeneration) {
+ ALOGV("Caught a stale input buffer [ID %d]", bufferID);
+ // buffer is stale (taken before a flush/shutdown) - repost it
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
+ postFillThisBuffer(info);
+ return;
+ }
+
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ sp<ABuffer> buffer;
+ int32_t err = OK;
+ bool eos = false;
+
+ if (!msg->findBuffer("buffer", &buffer)) {
+ // these are unfilled buffers returned by client
+ CHECK(msg->findInt32("err", &err));
+
+ if (err == OK) {
+ // buffers with no errors are returned on MediaCodec.flush
+ ALOGV("saw unfilled buffer (MediaCodec.flush)");
+ postFillThisBuffer(info);
+ return;
+ } else {
+ ALOGV("saw error %d instead of an input buffer", err);
+ eos = true;
+ }
+
+ buffer.clear();
+ }
+
+ int32_t isCSD;
+ if (buffer != NULL && buffer->meta()->findInt32("csd", &isCSD)
+ && isCSD != 0) {
+ // ignore codec-specific data buffers
+ ALOGW("MediaFilter received a codec-specific data buffer");
+ postFillThisBuffer(info);
+ return;
+ }
+
+ int32_t tmp;
+ if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
+ eos = true;
+ err = ERROR_END_OF_STREAM;
+ }
+
+ mAvailableInputBuffers.push_back(info);
+ processBuffers();
+
+ if (eos) {
+ mPortEOS[kPortIndexInput] = true;
+ mInputEOSResult = err;
+ }
+
+ ALOGV("Handled kWhatInputBufferFilled. [ID %u]", bufferID);
+}
+
+void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ BufferInfo *info = findBufferByID(kPortIndexOutput, bufferID);
+
+ if (mState != STARTED) {
+ // we're not running, so we'll just keep that buffer...
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ return;
+ }
+
+ if (info->mGeneration != mGeneration) {
+ ALOGV("Caught a stale output buffer [ID %d]", bufferID);
+ // buffer is stale (taken before a flush/shutdown) - keep it
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
+ return;
+ }
+
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ mAvailableOutputBuffers.push_back(info);
+
+ processBuffers();
+
+ ALOGV("Handled kWhatOutputBufferDrained. [ID %u]",
+ bufferID);
+}
+
+void MediaFilter::onShutdown(const sp<AMessage> &msg) {
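+ // bump the generation so buffers still owned upstream are treated as stale when returned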
+ mGeneration++;
+
+ if (mState != UNINITIALIZED) {
+ mFilter->reset();
+ }
+
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32("keepComponentAllocated", &keepComponentAllocated));
+ if (!keepComponentAllocated || mState == UNINITIALIZED) {
+ mState = UNINITIALIZED;
+ } else {
+ mState = INITIALIZED;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+}
+
+void MediaFilter::onFlush() {
+ mGeneration++;
+
+ mAvailableInputBuffers.clear();
+ for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
+ BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ }
+ mAvailableOutputBuffers.clear();
+ for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ mAvailableOutputBuffers.push_back(info);
+ }
+
+ mPortEOS[kPortIndexInput] = false;
+ mPortEOS[kPortIndexOutput] = false;
+ mInputEOSResult = OK;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+ ALOGV("Posted kWhatFlushCompleted");
+
+ // MediaCodec returns all input buffers after flush, so in
+ // onInputBufferFilled we call postFillThisBuffer on them
+}
+
+void MediaFilter::onSetParameters(const sp<AMessage> &msg) {
+ CHECK(mState != STARTED);
+
+ status_t err = mFilter->setParameters(msg);
+ if (err != (status_t)OK) {
+ ALOGE("setParameters returned err %d", err);
+ }
+}
+
+void MediaFilter::onCreateInputSurface() {
+ CHECK(mState == CONFIGURED);
+
+ mGraphicBufferListener = new GraphicBufferListener;
+
+ sp<AMessage> notify = new AMessage();
+ notify->setTarget(this);
+ status_t err = mGraphicBufferListener->init(
+ notify, mStride, mSliceHeight, kBufferCountActual);
+
+ if (err != OK) {
+ ALOGE("Failed to init mGraphicBufferListener: %d", err);
+ signalError(err);
+ return;
+ }
+
+ sp<AMessage> reply = mNotify->dup();
+ reply->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
+ reply->setObject(
+ "input-surface",
+ new BufferProducerWrapper(
+ mGraphicBufferListener->getIGraphicBufferProducer()));
+ reply->post();
+}
+
+void MediaFilter::onInputFrameAvailable() {
+ BufferItem item = mGraphicBufferListener->getBufferItem();
+ sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item);
+
+ // get pointer to graphic buffer
+ void* bufPtr;
+ buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &bufPtr);
+
+ // HACK - there is no OMX_COLOR_FORMATTYPE value for RGBA, so the format
+ // conversion is hardcoded until we add this.
+ // TODO: check input format and convert only if necessary
+ // copy RGBA graphic buffer into temporary ARGB input buffer
+ BufferInfo *inputInfo = new BufferInfo;
+ inputInfo->mData = new ABuffer(buf->getWidth() * buf->getHeight() * 4);
+ ALOGV("Copying surface data into temp buffer.");
+ convertRGBAToARGB(
+ (uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(),
+ buf->getStride(), inputInfo->mData->data());
+ inputInfo->mBufferID = item.mBuf;
+ inputInfo->mGeneration = mGeneration;
+ inputInfo->mOutputFlags = 0;
+ inputInfo->mStatus = BufferInfo::OWNED_BY_US;
+ inputInfo->mData->meta()->setInt64("timeUs", item.mTimestamp / 1000);
+
+ mAvailableInputBuffers.push_back(inputInfo);
+
+ mGraphicBufferListener->releaseBuffer(item);
+
+ signalProcessBuffers();
+}
+
+void MediaFilter::onSignalEndOfInputStream() {
+ // if using input surface, need to send an EOS output buffer
+ if (mGraphicBufferListener != NULL) {
+ Vector<BufferInfo> *outputBufs = &mBuffers[kPortIndexOutput];
+ BufferInfo* eosBuf;
+ bool foundBuf = false;
+ for (size_t i = 0; i < kBufferCountActual; i++) {
+ eosBuf = &outputBufs->editItemAt(i);
+ if (eosBuf->mStatus == BufferInfo::OWNED_BY_US) {
+ foundBuf = true;
+ break;
+ }
+ }
+
+ if (!foundBuf) {
+ ALOGE("onSignalEndOfInputStream failed to find an output buffer");
+ return;
+ }
+
+ eosBuf->mOutputFlags = OMX_BUFFERFLAG_EOS;
+ eosBuf->mGeneration = mGeneration;
+ eosBuf->mData->setRange(0, 0);
+ postDrainThisBuffer(eosBuf);
+ ALOGV("Posted EOS on output buffer %zu", eosBuf->mBufferID);
+ }
+
+ mPortEOS[kPortIndexOutput] = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);
+ notify->post();
+
+ ALOGV("Output stream saw EOS.");
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp
new file mode 100644
index 0000000..b569945
--- /dev/null
+++ b/media/libstagefright/filters/RSFilter.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RSFilter"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "RSFilter.h"
+
+namespace android {
+
+RSFilter::RSFilter() {
+}
+
+RSFilter::~RSFilter() {
+}
+
+status_t RSFilter::configure(const sp<AMessage> &msg) {
+ status_t err = SimpleFilter::configure(msg);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findString("cacheDir", &mCacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ sp<RenderScriptWrapper> wrapper;
+ if (!msg->findObject("rs-wrapper", (sp<RefBase>*)&wrapper)) {
+ ALOGE("Failed to find RenderScriptWrapper in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ mRS = wrapper->mContext;
+ mCallback = wrapper->mCallback;
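+ // the RS context and per-frame callback are supplied by the application through RenderScriptWrapper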
+
+ return OK;
+}
+
+status_t RSFilter::start() {
+ // 32-bit elements for ARGB8888
+ RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
+
+ RSC::Type::Builder tb(mRS, e);
+ tb.setX(mWidth);
+ tb.setY(mHeight);
+ RSC::sp<const RSC::Type> t = tb.create();
+
+ mAllocIn = RSC::Allocation::createTyped(mRS, t);
+ mAllocOut = RSC::Allocation::createTyped(mRS, t);
+
+ return OK;
+}
+
+void RSFilter::reset() {
+ mCallback.clear();
+ mAllocOut.clear();
+ mAllocIn.clear();
+ mRS.clear();
+}
+
+status_t RSFilter::setParameters(const sp<AMessage> &msg) {
+ return mCallback->handleSetParameters(msg);
+}
+
+status_t RSFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
+ mCallback->processBuffers(mAllocIn.get(), mAllocOut.get());
+ mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/RSFilter.h b/media/libstagefright/filters/RSFilter.h
new file mode 100644
index 0000000..c5b5074
--- /dev/null
+++ b/media/libstagefright/filters/RSFilter.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RS_FILTER_H_
+#define RS_FILTER_H_
+
+#include <media/stagefright/RenderScriptWrapper.h>
+#include <RenderScript.h>
+
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct AString;
+
+struct RSFilter : public SimpleFilter {
+public:
+ RSFilter();
+
+ virtual status_t configure(const sp<AMessage> &msg);
+ virtual status_t start();
+ virtual void reset();
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~RSFilter();
+
+private:
+ AString mCacheDir;
+ sp<RenderScriptWrapper::RSFilterCallback> mCallback;
+ RSC::sp<RSC::RS> mRS;
+ RSC::sp<RSC::Allocation> mAllocIn;
+ RSC::sp<RSC::Allocation> mAllocOut;
+};
+
+} // namespace android
+
+#endif // RS_FILTER_H_
diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp
new file mode 100644
index 0000000..ba5f75a
--- /dev/null
+++ b/media/libstagefright/filters/SaturationFilter.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SaturationFilter"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "SaturationFilter.h"
+
+namespace android {
+
+status_t SaturationFilter::configure(const sp<AMessage> &msg) {
+ status_t err = SimpleFilter::configure(msg);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findString("cacheDir", &mCacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ return OK;
+}
+
+status_t SaturationFilter::start() {
+ // TODO: use a single RS context object for entire application
+ mRS = new RSC::RS();
+
+ if (!mRS->init(mCacheDir.c_str())) {
+ ALOGE("Failed to initialize RenderScript context.");
+ return NO_INIT;
+ }
+
+ // 32-bit elements for ARGB8888
+ RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
+
+ RSC::Type::Builder tb(mRS, e);
+ tb.setX(mWidth);
+ tb.setY(mHeight);
+ RSC::sp<const RSC::Type> t = tb.create();
+
+ mAllocIn = RSC::Allocation::createTyped(mRS, t);
+ mAllocOut = RSC::Allocation::createTyped(mRS, t);
+
+ mScript = new ScriptC_saturationARGB(mRS);
+
+ mScript->set_gSaturation(mSaturation);
+
+ return OK;
+}
+
+void SaturationFilter::reset() {
+ mScript.clear();
+ mAllocOut.clear();
+ mAllocIn.clear();
+ mRS.clear();
+}
+
+status_t SaturationFilter::setParameters(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ float saturation;
+ if (params->findFloat("saturation", &saturation)) {
+ mSaturation = saturation;
+ }
+
+ return OK;
+}
+
+status_t SaturationFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
+ mScript->forEach_root(mAllocIn, mAllocOut);
+ mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/SaturationFilter.h b/media/libstagefright/filters/SaturationFilter.h
new file mode 100644
index 0000000..0545021
--- /dev/null
+++ b/media/libstagefright/filters/SaturationFilter.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SATURATION_FILTER_H_
+#define SATURATION_FILTER_H_
+
+#include <RenderScript.h>
+
+#include "ScriptC_saturationARGB.h"
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct SaturationFilter : public SimpleFilter {
+public:
+ SaturationFilter() : mSaturation(1.f) {};
+
+ virtual status_t configure(const sp<AMessage> &msg);
+ virtual status_t start();
+ virtual void reset();
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~SaturationFilter() {};
+
+private:
+ AString mCacheDir;
+ RSC::sp<RSC::RS> mRS;
+ RSC::sp<RSC::Allocation> mAllocIn;
+ RSC::sp<RSC::Allocation> mAllocOut;
+ RSC::sp<ScriptC_saturationARGB> mScript;
+ float mSaturation;
+};
+
+} // namespace android
+
+#endif // SATURATION_FILTER_H_
diff --git a/media/libstagefright/filters/SimpleFilter.cpp b/media/libstagefright/filters/SimpleFilter.cpp
new file mode 100644
index 0000000..6c1ca2c
--- /dev/null
+++ b/media/libstagefright/filters/SimpleFilter.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "SimpleFilter.h"
+
+namespace android {
+
+status_t SimpleFilter::configure(const sp<AMessage> &msg) {
+ CHECK(msg->findInt32("width", &mWidth));
+ CHECK(msg->findInt32("height", &mHeight));
+ if (!msg->findInt32("stride", &mStride)) {
+ mStride = mWidth;
+ }
+ if (!msg->findInt32("slice-height", &mSliceHeight)) {
+ mSliceHeight = mHeight;
+ }
+ CHECK(msg->findInt32("color-format", &mColorFormatIn));
+ mColorFormatOut = mColorFormatIn;
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/SimpleFilter.h b/media/libstagefright/filters/SimpleFilter.h
new file mode 100644
index 0000000..4cd37ef
--- /dev/null
+++ b/media/libstagefright/filters/SimpleFilter.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_FILTER_H_
+#define SIMPLE_FILTER_H_
+
+#include <stdint.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+struct ABuffer;
+struct AMessage;
+
+namespace android {
+
+struct SimpleFilter : public RefBase {
+public:
+ SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0),
+ mColorFormatIn(0), mColorFormatOut(0) {};
+
+ virtual status_t configure(const sp<AMessage> &msg);
+
+ virtual status_t start() = 0;
+ virtual void reset() = 0;
+ virtual status_t setParameters(const sp<AMessage> &msg) = 0;
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) = 0;
+
+protected:
+ int32_t mWidth, mHeight;
+ int32_t mStride, mSliceHeight;
+ int32_t mColorFormatIn, mColorFormatOut;
+
+ virtual ~SimpleFilter() {};
+};
+
+} // namespace android
+
+#endif // SIMPLE_FILTER_H_
diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp
new file mode 100644
index 0000000..3f1243c
--- /dev/null
+++ b/media/libstagefright/filters/ZeroFilter.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ZeroFilter"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "ZeroFilter.h"
+
+namespace android {
+
+status_t ZeroFilter::setParameters(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ int32_t invert;
+ if (params->findInt32("invert", &invert)) {
+ mInvertData = (invert != 0);
+ }
+
+ return OK;
+}
+
+status_t ZeroFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ // assuming identical input & output buffers, since we're a copy filter
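+ // "invert" XORs each 32-bit pixel with all ones, flipping every channel including alpha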
+ if (mInvertData) {
+ uint32_t* src = (uint32_t*)srcBuffer->data();
+ uint32_t* dest = (uint32_t*)outBuffer->data();
+ for (size_t i = 0; i < srcBuffer->size() / 4; ++i) {
+ *(dest++) = *(src++) ^ 0xFFFFFFFF;
+ }
+ } else {
+ memcpy(outBuffer->data(), srcBuffer->data(), srcBuffer->size());
+ }
+ outBuffer->setRange(0, srcBuffer->size());
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/ZeroFilter.h b/media/libstagefright/filters/ZeroFilter.h
new file mode 100644
index 0000000..bd34dfb
--- /dev/null
+++ b/media/libstagefright/filters/ZeroFilter.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ZERO_FILTER_H_
+#define ZERO_FILTER_H_
+
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct ZeroFilter : public SimpleFilter {
+public:
+ ZeroFilter() : mInvertData(false) {};
+
+ virtual status_t start() { return OK; };
+ virtual void reset() {};
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~ZeroFilter() {};
+
+private:
+ bool mInvertData;
+};
+
+} // namespace android
+
+#endif // ZERO_FILTER_H_
diff --git a/media/libstagefright/filters/saturation.rs b/media/libstagefright/filters/saturation.rs
new file mode 100644
index 0000000..2c867ac
--- /dev/null
+++ b/media/libstagefright/filters/saturation.rs
@@ -0,0 +1,40 @@
+// Sample script for RGB888 support (compare to saturationARGB.rs)
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+// global variables (parameters accessible to application code)
+float gSaturation = 1.0f;
+
+void root(const uchar3 *v_in, uchar3 *v_out) {
+ // scale 0-255 uchar to 0-1.0 float
+ float3 in = {v_in->r * 0.003921569f, v_in->g * 0.003921569f,
+ v_in->b * 0.003921569f};
+
+ // apply saturation filter
+ float3 result = dot(in, gMonoMult);
+ result = mix(result, in, gSaturation);
+
+ // convert to uchar, copied from rsPackColorTo8888
+ v_out->x = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+ v_out->y = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+ v_out->z = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
diff --git a/media/libstagefright/filters/saturationARGB.rs b/media/libstagefright/filters/saturationARGB.rs
new file mode 100644
index 0000000..1de9dd8
--- /dev/null
+++ b/media/libstagefright/filters/saturationARGB.rs
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+// global variables (parameters accessible to application code)
+float gSaturation = 1.0f;
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+ v_out->x = v_in->x; // don't modify A
+
+ // get RGB, scale 0-255 uchar to 0-1.0 float
+ float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
+ v_in->w * 0.003921569f};
+
+ // apply saturation filter
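+ // blend luminance back with the original color: gSaturation = 0 gives grayscale, 1 leaves the frame unchanged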
+ float3 result = dot(rgb, gMonoMult);
+ result = mix(result, rgb, gSaturation);
+
+ v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+ v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+ v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
diff --git a/media/libstagefright/foundation/AHandler.cpp b/media/libstagefright/foundation/AHandler.cpp
index bd5f7e9..7dbbe54 100644
--- a/media/libstagefright/foundation/AHandler.cpp
+++ b/media/libstagefright/foundation/AHandler.cpp
@@ -19,15 +19,23 @@
#include <utils/Log.h>
#include <media/stagefright/foundation/AHandler.h>
-
-#include <media/stagefright/foundation/ALooperRoster.h>
+#include <media/stagefright/foundation/AMessage.h>
namespace android {
-sp<ALooper> AHandler::looper() {
- extern ALooperRoster gLooperRoster;
+void AHandler::deliverMessage(const sp<AMessage> &msg) {
+ onMessageReceived(msg);
+ mMessageCounter++;
- return gLooperRoster.findLooper(id());
+ if (mVerboseStats) {
+ uint32_t what = msg->what();
+ ssize_t idx = mMessages.indexOfKey(what);
+ if (idx < 0) {
+ mMessages.add(what, 1);
+ } else {
+ mMessages.editValueAt(idx)++;
+ }
+ }
}
} // namespace android
diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp
index 88b1c92..90b5f68 100644
--- a/media/libstagefright/foundation/ALooper.cpp
+++ b/media/libstagefright/foundation/ALooper.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ALooper"
+
+#include <media/stagefright/foundation/ADebug.h>
+
#include <utils/Log.h>
#include <sys/time.h>
@@ -210,7 +213,7 @@ bool ALooper::loop() {
mEventQueue.erase(mEventQueue.begin());
}
- gLooperRoster.deliverMessage(event.mMessage);
+ event.mMessage->deliver();
// NOTE: It's important to note that at this point our "ALooper" object
// may no longer exist (its final reference may have gone away while
@@ -220,4 +223,29 @@ bool ALooper::loop() {
return true;
}
+// to be called by AMessage::postAndAwaitResponse only
+sp<AReplyToken> ALooper::createReplyToken() {
+ return new AReplyToken(this);
+}
+
+// to be called by AMessage::postAndAwaitResponse only
+status_t ALooper::awaitResponse(const sp<AReplyToken> &replyToken, sp<AMessage> *response) {
+ // return status in case we want to handle an interrupted wait
+ Mutex::Autolock autoLock(mRepliesLock);
+ CHECK(replyToken != NULL);
+ while (!replyToken->retrieveReply(response)) {
+ mRepliesCondition.wait(mRepliesLock);
+ }
+ return OK;
+}
+
+status_t ALooper::postReply(const sp<AReplyToken> &replyToken, const sp<AMessage> &reply) {
+ Mutex::Autolock autoLock(mRepliesLock);
+ status_t err = replyToken->setReply(reply);
+ if (err == OK) {
+ mRepliesCondition.broadcast();
+ }
+ return err;
+}
+
} // namespace android
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 2d57aee..473ce1b 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -30,8 +30,7 @@ namespace android {
static bool verboseStats = false;
ALooperRoster::ALooperRoster()
- : mNextHandlerID(1),
- mNextReplyID(1) {
+ : mNextHandlerID(1) {
}
ALooper::handler_id ALooperRoster::registerHandler(
@@ -49,7 +48,7 @@ ALooper::handler_id ALooperRoster::registerHandler(
ALooper::handler_id handlerID = mNextHandlerID++;
mHandlers.add(handlerID, info);
- handler->setID(handlerID);
+ handler->setID(handlerID, looper);
return handlerID;
}
@@ -68,7 +67,7 @@ void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) {
sp<AHandler> handler = info.mHandler.promote();
if (handler != NULL) {
- handler->setID(0);
+ handler->setID(0, NULL);
}
mHandlers.removeItemsAt(index);
@@ -100,116 +99,6 @@ void ALooperRoster::unregisterStaleHandlers() {
}
}
-status_t ALooperRoster::postMessage(
- const sp<AMessage> &msg, int64_t delayUs) {
-
- sp<ALooper> looper = findLooper(msg->target());
-
- if (looper == NULL) {
- return -ENOENT;
- }
- looper->post(msg, delayUs);
- return OK;
-}
-
-void ALooperRoster::deliverMessage(const sp<AMessage> &msg) {
- sp<AHandler> handler;
-
- {
- Mutex::Autolock autoLock(mLock);
-
- ssize_t index = mHandlers.indexOfKey(msg->target());
-
- if (index < 0) {
- ALOGW("failed to deliver message. Target handler not registered.");
- return;
- }
-
- const HandlerInfo &info = mHandlers.valueAt(index);
- handler = info.mHandler.promote();
-
- if (handler == NULL) {
- ALOGW("failed to deliver message. "
- "Target handler %d registered, but object gone.",
- msg->target());
-
- mHandlers.removeItemsAt(index);
- return;
- }
- }
-
- handler->onMessageReceived(msg);
- handler->mMessageCounter++;
-
- if (verboseStats) {
- uint32_t what = msg->what();
- ssize_t idx = handler->mMessages.indexOfKey(what);
- if (idx < 0) {
- handler->mMessages.add(what, 1);
- } else {
- handler->mMessages.editValueAt(idx)++;
- }
- }
-}
-
-sp<ALooper> ALooperRoster::findLooper(ALooper::handler_id handlerID) {
- Mutex::Autolock autoLock(mLock);
-
- ssize_t index = mHandlers.indexOfKey(handlerID);
-
- if (index < 0) {
- return NULL;
- }
-
- sp<ALooper> looper = mHandlers.valueAt(index).mLooper.promote();
-
- if (looper == NULL) {
- mHandlers.removeItemsAt(index);
- return NULL;
- }
-
- return looper;
-}
-
-status_t ALooperRoster::postAndAwaitResponse(
- const sp<AMessage> &msg, sp<AMessage> *response) {
- sp<ALooper> looper = findLooper(msg->target());
-
- if (looper == NULL) {
- ALOGW("failed to post message. "
- "Target handler %d still registered, but object gone.",
- msg->target());
- response->clear();
- return -ENOENT;
- }
-
- Mutex::Autolock autoLock(mLock);
-
- uint32_t replyID = mNextReplyID++;
-
- msg->setInt32("replyID", replyID);
-
- looper->post(msg, 0 /* delayUs */);
-
- ssize_t index;
- while ((index = mReplies.indexOfKey(replyID)) < 0) {
- mRepliesCondition.wait(mLock);
- }
-
- *response = mReplies.valueAt(index);
- mReplies.removeItemsAt(index);
-
- return OK;
-}
-
-void ALooperRoster::postReply(uint32_t replyID, const sp<AMessage> &reply) {
- Mutex::Autolock autoLock(mLock);
-
- CHECK(mReplies.indexOfKey(replyID) < 0);
- mReplies.add(replyID, reply);
- mRepliesCondition.broadcast();
-}
-
static void makeFourCC(uint32_t fourcc, char *s) {
s[0] = (fourcc >> 24) & 0xff;
if (s[0]) {
@@ -225,7 +114,7 @@ static void makeFourCC(uint32_t fourcc, char *s) {
void ALooperRoster::dump(int fd, const Vector<String16>& args) {
bool clear = false;
bool oldVerbose = verboseStats;
- for (size_t i = 0;i < args.size(); i++) {
+ for (size_t i = 0; i < args.size(); i++) {
if (args[i] == String16("-c")) {
clear = true;
} else if (args[i] == String16("-von")) {
@@ -241,22 +130,23 @@ void ALooperRoster::dump(int fd, const Vector<String16>& args) {
Mutex::Autolock autoLock(mLock);
size_t n = mHandlers.size();
- s.appendFormat(" %zd registered handlers:\n", n);
+ s.appendFormat(" %zu registered handlers:\n", n);
for (size_t i = 0; i < n; i++) {
- s.appendFormat(" %zd: ", i);
+ s.appendFormat(" %d: ", mHandlers.keyAt(i));
HandlerInfo &info = mHandlers.editValueAt(i);
sp<ALooper> looper = info.mLooper.promote();
if (looper != NULL) {
- s.append(looper->mName.c_str());
+ s.append(looper->getName());
sp<AHandler> handler = info.mHandler.promote();
if (handler != NULL) {
+ handler->mVerboseStats = verboseStats;
s.appendFormat(": %u messages processed", handler->mMessageCounter);
if (verboseStats) {
for (size_t j = 0; j < handler->mMessages.size(); j++) {
char fourcc[15];
makeFourCC(handler->mMessages.keyAt(j), fourcc);
- s.appendFormat("\n %s: %d",
+ s.appendFormat("\n %s: %u",
fourcc,
handler->mMessages.valueAt(j));
}
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 1f46bc9..e549ff6 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -27,6 +27,7 @@
#include "ABuffer.h"
#include "ADebug.h"
#include "ALooperRoster.h"
+#include "AHandler.h"
#include "AString.h"
#include <binder/Parcel.h>
@@ -36,10 +37,27 @@ namespace android {
extern ALooperRoster gLooperRoster;
-AMessage::AMessage(uint32_t what, ALooper::handler_id target)
+status_t AReplyToken::setReply(const sp<AMessage> &reply) {
+ if (mReplied) {
+ ALOGE("trying to post a duplicate reply");
+ return -EBUSY;
+ }
+ CHECK(mReply == NULL);
+ mReply = reply;
+ mReplied = true;
+ return OK;
+}
+
+AMessage::AMessage(void)
+ : mWhat(0),
+ mTarget(0),
+ mNumItems(0) {
+}
+
+AMessage::AMessage(uint32_t what, const sp<const AHandler> &handler)
: mWhat(what),
- mTarget(target),
mNumItems(0) {
+ setTarget(handler);
}
AMessage::~AMessage() {
@@ -54,12 +72,16 @@ uint32_t AMessage::what() const {
return mWhat;
}
-void AMessage::setTarget(ALooper::handler_id handlerID) {
- mTarget = handlerID;
-}
-
-ALooper::handler_id AMessage::target() const {
- return mTarget;
+void AMessage::setTarget(const sp<const AHandler> &handler) {
+ if (handler == NULL) {
+ mTarget = 0;
+ mHandler.clear();
+ mLooper.clear();
+ } else {
+ mTarget = handler->id();
+ mHandler = handler->getHandler();
+ mLooper = handler->getLooper();
+ }
}
void AMessage::clear() {
@@ -322,33 +344,76 @@ bool AMessage::findRect(
return true;
}
-void AMessage::post(int64_t delayUs) {
- gLooperRoster.postMessage(this, delayUs);
+void AMessage::deliver() {
+ sp<AHandler> handler = mHandler.promote();
+ if (handler == NULL) {
+ ALOGW("failed to deliver message as target handler %d is gone.", mTarget);
+ return;
+ }
+
+ handler->deliverMessage(this);
+}
+
+status_t AMessage::post(int64_t delayUs) {
+ sp<ALooper> looper = mLooper.promote();
+ if (looper == NULL) {
+ ALOGW("failed to post message as target looper for handler %d is gone.", mTarget);
+ return -ENOENT;
+ }
+
+ looper->post(this, delayUs);
+ return OK;
}
status_t AMessage::postAndAwaitResponse(sp<AMessage> *response) {
- return gLooperRoster.postAndAwaitResponse(this, response);
+ sp<ALooper> looper = mLooper.promote();
+ if (looper == NULL) {
+ ALOGW("failed to post message as target looper for handler %d is gone.", mTarget);
+ return -ENOENT;
+ }
+
+ sp<AReplyToken> token = looper->createReplyToken();
+ if (token == NULL) {
+ ALOGE("failed to create reply token");
+ return -ENOMEM;
+ }
+ setObject("replyID", token);
+
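+ // the receiving handler retrieves this token via senderAwaitsResponse() and answers with postReply()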
+ looper->post(this, 0 /* delayUs */);
+ return looper->awaitResponse(token, response);
}
-void AMessage::postReply(uint32_t replyID) {
- gLooperRoster.postReply(replyID, this);
+status_t AMessage::postReply(const sp<AReplyToken> &replyToken) {
+ if (replyToken == NULL) {
+ ALOGW("failed to post reply to a NULL token");
+ return -ENOENT;
+ }
+ sp<ALooper> looper = replyToken->getLooper();
+ if (looper == NULL) {
+ ALOGW("failed to post reply as target looper is gone.");
+ return -ENOENT;
+ }
+ return looper->postReply(replyToken, this);
}
-bool AMessage::senderAwaitsResponse(uint32_t *replyID) const {
- int32_t tmp;
- bool found = findInt32("replyID", &tmp);
+bool AMessage::senderAwaitsResponse(sp<AReplyToken> *replyToken) {
+ sp<RefBase> tmp;
+ bool found = findObject("replyID", &tmp);
if (!found) {
return false;
}
- *replyID = static_cast<uint32_t>(tmp);
+ *replyToken = static_cast<AReplyToken *>(tmp.get());
+ tmp.clear();
+ setObject("replyID", tmp);
+ // TODO: delete Object instead of setting it to NULL
- return true;
+ return *replyToken != NULL;
}
sp<AMessage> AMessage::dup() const {
- sp<AMessage> msg = new AMessage(mWhat, mTarget);
+ sp<AMessage> msg = new AMessage(mWhat, mHandler.promote());
msg->mNumItems = mNumItems;
#ifdef DUMP_STATS
@@ -532,7 +597,8 @@ AString AMessage::debugString(int32_t indent) const {
// static
sp<AMessage> AMessage::FromParcel(const Parcel &parcel) {
int32_t what = parcel.readInt32();
- sp<AMessage> msg = new AMessage(what);
+ sp<AMessage> msg = new AMessage();
+ msg->setWhat(what);
msg->mNumItems = static_cast<size_t>(parcel.readInt32());
for (size_t i = 0; i < msg->mNumItems; ++i) {
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index d0f3bc2..4886000 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
@@ -49,8 +50,111 @@
namespace android {
-// Number of recently-read bytes to use for bandwidth estimation
-const size_t LiveSession::kBandwidthHistoryBytes = 200 * 1024;
+// static
+// Bandwidth Switch Mark Defaults
+const int64_t LiveSession::kUpSwitchMarkUs = 15000000ll;
+const int64_t LiveSession::kDownSwitchMarkUs = 20000000ll;
+const int64_t LiveSession::kUpSwitchMarginUs = 5000000ll;
+const int64_t LiveSession::kResumeThresholdUs = 100000ll;
+
+// Buffer Prepare/Ready/Underflow Marks
+const int64_t LiveSession::kReadyMarkUs = 5000000ll;
+const int64_t LiveSession::kPrepareMarkUs = 1500000ll;
+const int64_t LiveSession::kUnderflowMarkUs = 1000000ll;
+
+struct LiveSession::BandwidthEstimator : public RefBase {
+ BandwidthEstimator();
+
+ void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
+ bool estimateBandwidth(int32_t *bandwidth);
+
+private:
+ // Bandwidth estimation parameters
+ static const int32_t kMaxBandwidthHistoryItems = 20;
+ static const int64_t kMaxBandwidthHistoryWindowUs = 5000000ll; // 5 sec
+
+ struct BandwidthEntry {
+ int64_t mDelayUs;
+ size_t mNumBytes;
+ };
+
+ Mutex mLock;
+ List<BandwidthEntry> mBandwidthHistory;
+ int64_t mTotalTransferTimeUs;
+ size_t mTotalTransferBytes;
+
+ DISALLOW_EVIL_CONSTRUCTORS(BandwidthEstimator);
+};
+
+LiveSession::BandwidthEstimator::BandwidthEstimator() :
+ mTotalTransferTimeUs(0),
+ mTotalTransferBytes(0) {
+}
+
+void LiveSession::BandwidthEstimator::addBandwidthMeasurement(
+ size_t numBytes, int64_t delayUs) {
+ AutoMutex autoLock(mLock);
+
+ BandwidthEntry entry;
+ entry.mDelayUs = delayUs;
+ entry.mNumBytes = numBytes;
+ mTotalTransferTimeUs += delayUs;
+ mTotalTransferBytes += numBytes;
+ mBandwidthHistory.push_back(entry);
+
+ // trim old samples, keeping at least kMaxBandwidthHistoryItems samples,
+ // and total transfer time at least kMaxBandwidthHistoryWindowUs.
+ while (mBandwidthHistory.size() > kMaxBandwidthHistoryItems) {
+ List<BandwidthEntry>::iterator it = mBandwidthHistory.begin();
+ if (mTotalTransferTimeUs - it->mDelayUs < kMaxBandwidthHistoryWindowUs) {
+ break;
+ }
+ mTotalTransferTimeUs -= it->mDelayUs;
+ mTotalTransferBytes -= it->mNumBytes;
+ mBandwidthHistory.erase(mBandwidthHistory.begin());
+ }
+}
+
+bool LiveSession::BandwidthEstimator::estimateBandwidth(int32_t *bandwidthBps) {
+ AutoMutex autoLock(mLock);
+
+ if (mBandwidthHistory.size() < 2) {
+ return false;
+ }
+
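+ // bits per second: total bytes * 8, with 1E6 converting the accumulated transfer time from microseconds to seconds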
+ *bandwidthBps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs);
+ return true;
+}
+
+//static
+const char *LiveSession::getKeyForStream(StreamType type) {
+ switch (type) {
+ case STREAMTYPE_VIDEO:
+ return "timeUsVideo";
+ case STREAMTYPE_AUDIO:
+ return "timeUsAudio";
+ case STREAMTYPE_SUBTITLES:
+ return "timeUsSubtitle";
+ default:
+ TRESPASS();
+ }
+ return NULL;
+}
+
+//static
+const char *LiveSession::getNameForStream(StreamType type) {
+ switch (type) {
+ case STREAMTYPE_VIDEO:
+ return "video";
+ case STREAMTYPE_AUDIO:
+ return "audio";
+ case STREAMTYPE_SUBTITLES:
+ return "subs";
+ default:
+ break;
+ }
+ return "unknown";
+}
LiveSession::LiveSession(
const sp<AMessage> &notify, uint32_t flags,
@@ -58,168 +162,80 @@ LiveSession::LiveSession(
: mNotify(notify),
mFlags(flags),
mHTTPService(httpService),
+ mBuffering(false),
mInPreparationPhase(true),
+ mPollBufferingGeneration(0),
+ mPrevBufferPercentage(-1),
mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())),
mCurBandwidthIndex(-1),
+ mOrigBandwidthIndex(-1),
+ mLastBandwidthBps(-1ll),
+ mBandwidthEstimator(new BandwidthEstimator()),
+ mMaxWidth(720),
+ mMaxHeight(480),
mStreamMask(0),
mNewStreamMask(0),
mSwapMask(0),
- mCheckBandwidthGeneration(0),
mSwitchGeneration(0),
mSubtitleGeneration(0),
mLastDequeuedTimeUs(0ll),
mRealTimeBaseUs(0ll),
mReconfigurationInProgress(false),
mSwitchInProgress(false),
- mDisconnectReplyID(0),
- mSeekReplyID(0),
+ mUpSwitchMark(kUpSwitchMarkUs),
+ mDownSwitchMark(kDownSwitchMarkUs),
+ mUpSwitchMargin(kUpSwitchMarginUs),
mFirstTimeUsValid(false),
mFirstTimeUs(0),
mLastSeekTimeUs(0) {
-
mStreams[kAudioIndex] = StreamItem("audio");
mStreams[kVideoIndex] = StreamItem("video");
mStreams[kSubtitleIndex] = StreamItem("subtitles");
for (size_t i = 0; i < kMaxStreams; ++i) {
- mDiscontinuities.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
mPacketSources.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
mPacketSources2.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
- mBuffering[i] = false;
}
-
- size_t numHistoryItems = kBandwidthHistoryBytes /
- PlaylistFetcher::kDownloadBlockSize + 1;
- if (numHistoryItems < 5) {
- numHistoryItems = 5;
- }
- mHTTPDataSource->setBandwidthHistorySize(numHistoryItems);
}
LiveSession::~LiveSession() {
-}
-
-sp<ABuffer> LiveSession::createFormatChangeBuffer(bool swap) {
- ABuffer *discontinuity = new ABuffer(0);
- discontinuity->meta()->setInt32("discontinuity", ATSParser::DISCONTINUITY_FORMATCHANGE);
- discontinuity->meta()->setInt32("swapPacketSource", swap);
- discontinuity->meta()->setInt32("switchGeneration", mSwitchGeneration);
- discontinuity->meta()->setInt64("timeUs", -1);
- return discontinuity;
-}
-
-void LiveSession::swapPacketSource(StreamType stream) {
- sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream);
- sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream);
- sp<AnotherPacketSource> tmp = aps;
- aps = aps2;
- aps2 = tmp;
- aps2->clear();
+ if (mFetcherLooper != NULL) {
+ mFetcherLooper->stop();
+ }
}
status_t LiveSession::dequeueAccessUnit(
StreamType stream, sp<ABuffer> *accessUnit) {
- if (!(mStreamMask & stream)) {
- // return -EWOULDBLOCK to avoid halting the decoder
- // when switching between audio/video and audio only.
- return -EWOULDBLOCK;
- }
-
- status_t finalResult;
- sp<AnotherPacketSource> discontinuityQueue = mDiscontinuities.valueFor(stream);
- if (discontinuityQueue->hasBufferAvailable(&finalResult)) {
- discontinuityQueue->dequeueAccessUnit(accessUnit);
- // seeking, track switching
- sp<AMessage> extra;
- int64_t timeUs;
- if ((*accessUnit)->meta()->findMessage("extra", &extra)
- && extra != NULL
- && extra->findInt64("timeUs", &timeUs)) {
- // seeking only
- mLastSeekTimeUs = timeUs;
- mDiscontinuityOffsetTimesUs.clear();
- mDiscontinuityAbsStartTimesUs.clear();
- }
- return INFO_DISCONTINUITY;
- }
-
+ status_t finalResult = OK;
sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(stream);
- ssize_t idx = typeToIndex(stream);
- if (!packetSource->hasBufferAvailable(&finalResult)) {
+ ssize_t streamIdx = typeToIndex(stream);
+ if (streamIdx < 0) {
+ return INVALID_VALUE;
+ }
+ const char *streamStr = getNameForStream(stream);
+ // Do not let the client pull data if we don't have any data packets yet.
+ // We might only have a format discontinuity queued, without data.
+ // When NuPlayerDecoder dequeues the format discontinuity, it will
+ // immediately try to getFormat. If we return NULL, NuPlayerDecoder
+ // thinks it can do a seamless change, so it will not shut down the decoder.
+ // When the actual format arrives, it can't handle it and gets stuck.
+ if (!packetSource->hasDataBufferAvailable(&finalResult)) {
+ ALOGV("[%s] dequeueAccessUnit: no buffer available (finalResult=%d)",
+ streamStr, finalResult);
+
if (finalResult == OK) {
- mBuffering[idx] = true;
return -EAGAIN;
} else {
return finalResult;
}
}
- int32_t targetDuration = 0;
- sp<AMessage> meta = packetSource->getLatestEnqueuedMeta();
- if (meta != NULL) {
- meta->findInt32("targetDuration", &targetDuration);
- }
-
- int64_t targetDurationUs = targetDuration * 1000000ll;
- if (targetDurationUs == 0 ||
- targetDurationUs > PlaylistFetcher::kMinBufferedDurationUs) {
- // Fetchers limit buffering to
- // min(3 * targetDuration, kMinBufferedDurationUs)
- targetDurationUs = PlaylistFetcher::kMinBufferedDurationUs;
- }
-
- if (mBuffering[idx]) {
- if (mSwitchInProgress
- || packetSource->isFinished(0)
- || packetSource->getEstimatedDurationUs() > targetDurationUs) {
- mBuffering[idx] = false;
- }
- }
-
- if (mBuffering[idx]) {
- return -EAGAIN;
- }
-
- // wait for counterpart
- sp<AnotherPacketSource> otherSource;
- uint32_t mask = mNewStreamMask & mStreamMask;
- uint32_t fetchersMask = 0;
- for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- uint32_t fetcherMask = mFetcherInfos.valueAt(i).mFetcher->getStreamTypeMask();
- fetchersMask |= fetcherMask;
- }
- mask &= fetchersMask;
- if (stream == STREAMTYPE_AUDIO && (mask & STREAMTYPE_VIDEO)) {
- otherSource = mPacketSources.valueFor(STREAMTYPE_VIDEO);
- } else if (stream == STREAMTYPE_VIDEO && (mask & STREAMTYPE_AUDIO)) {
- otherSource = mPacketSources.valueFor(STREAMTYPE_AUDIO);
- }
- if (otherSource != NULL && !otherSource->hasBufferAvailable(&finalResult)) {
- return finalResult == OK ? -EAGAIN : finalResult;
- }
+ // Let the client dequeue as long as we have buffers available
+ // Do not make pause/resume decisions here.
status_t err = packetSource->dequeueAccessUnit(accessUnit);
- size_t streamIdx;
- const char *streamStr;
- switch (stream) {
- case STREAMTYPE_AUDIO:
- streamIdx = kAudioIndex;
- streamStr = "audio";
- break;
- case STREAMTYPE_VIDEO:
- streamIdx = kVideoIndex;
- streamStr = "video";
- break;
- case STREAMTYPE_SUBTITLES:
- streamIdx = kSubtitleIndex;
- streamStr = "subs";
- break;
- default:
- TRESPASS();
- }
-
StreamItem& strm = mStreams[streamIdx];
if (err == INFO_DISCONTINUITY) {
// adaptive streaming, discontinuities in the playlist
@@ -235,50 +251,34 @@ status_t LiveSession::dequeueAccessUnit(
streamStr,
type,
extra == NULL ? "NULL" : extra->debugString().c_str());
-
- int32_t swap;
- if ((*accessUnit)->meta()->findInt32("swapPacketSource", &swap) && swap) {
- int32_t switchGeneration;
- CHECK((*accessUnit)->meta()->findInt32("switchGeneration", &switchGeneration));
- {
- Mutex::Autolock lock(mSwapMutex);
- if (switchGeneration == mSwitchGeneration) {
- swapPacketSource(stream);
- sp<AMessage> msg = new AMessage(kWhatSwapped, id());
- msg->setInt32("stream", stream);
- msg->setInt32("switchGeneration", switchGeneration);
- msg->post();
- }
- }
- } else {
- size_t seq = strm.mCurDiscontinuitySeq;
- int64_t offsetTimeUs;
- if (mDiscontinuityOffsetTimesUs.indexOfKey(seq) >= 0) {
- offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(seq);
- } else {
- offsetTimeUs = 0;
- }
-
- seq += 1;
- if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
- int64_t firstTimeUs;
- firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq);
- offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs;
- offsetTimeUs += strm.mLastSampleDurationUs;
- } else {
- offsetTimeUs += strm.mLastSampleDurationUs;
- }
-
- mDiscontinuityOffsetTimesUs.add(seq, offsetTimeUs);
- }
} else if (err == OK) {
if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) {
- int64_t timeUs;
+ int64_t timeUs, originalTimeUs;
int32_t discontinuitySeq = 0;
CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+ originalTimeUs = timeUs;
(*accessUnit)->meta()->findInt32("discontinuitySeq", &discontinuitySeq);
- strm.mCurDiscontinuitySeq = discontinuitySeq;
+ if (discontinuitySeq > (int32_t) strm.mCurDiscontinuitySeq) {
+ int64_t offsetTimeUs;
+ if (mDiscontinuityOffsetTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
+ offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(strm.mCurDiscontinuitySeq);
+ } else {
+ offsetTimeUs = 0;
+ }
+
+ if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
+ int64_t firstTimeUs;
+ firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq);
+ offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs;
+ offsetTimeUs += strm.mLastSampleDurationUs;
+ } else {
+ offsetTimeUs += strm.mLastSampleDurationUs;
+ }
+
+ mDiscontinuityOffsetTimesUs.add(discontinuitySeq, offsetTimeUs);
+ strm.mCurDiscontinuitySeq = discontinuitySeq;
+ }
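A worked illustration of the discontinuity-offset bookkeeping above (all numbers hypothetical, not taken from this change):
// Suppose the stream moves from discontinuity sequence 3 to 4, sequence 3 had
// accumulated an offset of 5.0s, its recorded absolute start time was 100.0s,
// and the last dequeued sample was at 130.0s with a 20ms duration. Then
//     offsetTimeUs = 5.0s + (130.0s - 100.0s) + 0.02s = 35.02s
// is stored for sequence 4, and timestamps of sequence-4 samples are shifted
// by that offset when they are dequeued (see the timeUs adjustment below).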
int32_t discard = 0;
int64_t firstTimeUs;
@@ -309,7 +309,8 @@ status_t LiveSession::dequeueAccessUnit(
timeUs += mDiscontinuityOffsetTimesUs.valueFor(discontinuitySeq);
}
- ALOGV("[%s] read buffer at time %" PRId64 " us", streamStr, timeUs);
+ ALOGV("[%s] dequeueAccessUnit: time %lld us, original %lld us",
+ streamStr, (long long)timeUs, (long long)originalTimeUs);
(*accessUnit)->meta()->setInt64("timeUs", timeUs);
mLastDequeuedTimeUs = timeUs;
mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
@@ -331,7 +332,6 @@ status_t LiveSession::dequeueAccessUnit(
}
status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) {
- // No swapPacketSource race condition; called from the same thread as dequeueAccessUnit.
if (!(mStreamMask & stream)) {
return UNKNOWN_ERROR;
}
@@ -344,12 +344,24 @@ status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) {
return -EAGAIN;
}
+ if (stream == STREAMTYPE_AUDIO) {
+ // set AAC input buffer size to 32K bytes (256kbps x 1sec)
+ meta->setInt32(kKeyMaxInputSize, 32 * 1024);
+ } else if (stream == STREAMTYPE_VIDEO) {
+ meta->setInt32(kKeyMaxWidth, mMaxWidth);
+ meta->setInt32(kKeyMaxHeight, mMaxHeight);
+ }
+
return convertMetaDataToMessage(meta, format);
}
+sp<HTTPBase> LiveSession::getHTTPDataSource() {
+ return new MediaHTTP(mHTTPService->makeHTTPConnection());
+}
+
void LiveSession::connectAsync(
const char *url, const KeyedVector<String8, String8> *headers) {
- sp<AMessage> msg = new AMessage(kWhatConnect, id());
+ sp<AMessage> msg = new AMessage(kWhatConnect, this);
msg->setString("url", url);
if (headers != NULL) {
@@ -362,7 +374,7 @@ void LiveSession::connectAsync(
}
status_t LiveSession::disconnect() {
- sp<AMessage> msg = new AMessage(kWhatDisconnect, id());
+ sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
@@ -371,7 +383,7 @@ status_t LiveSession::disconnect() {
}
status_t LiveSession::seekTo(int64_t timeUs) {
- sp<AMessage> msg = new AMessage(kWhatSeek, id());
+ sp<AMessage> msg = new AMessage(kWhatSeek, this);
msg->setInt64("timeUs", timeUs);
sp<AMessage> response;
@@ -380,6 +392,95 @@ status_t LiveSession::seekTo(int64_t timeUs) {
return err;
}
+bool LiveSession::checkSwitchProgress(
+ sp<AMessage> &stopParams, int64_t delayUs, bool *needResumeUntil) {
+ AString newUri;
+ CHECK(stopParams->findString("uri", &newUri));
+
+ *needResumeUntil = false;
+ sp<AMessage> firstNewMeta[kMaxStreams];
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ StreamType stream = indexToType(i);
+ if (!(mSwapMask & mNewStreamMask & stream)
+ || (mStreams[i].mNewUri != newUri)) {
+ continue;
+ }
+ if (stream == STREAMTYPE_SUBTITLES) {
+ continue;
+ }
+ sp<AnotherPacketSource> &source = mPacketSources.editValueAt(i);
+
+ // First, get the latest dequeued meta, which is where the decoder is at.
+ // (When switching up, we take the meta after a certain delay, so that
+ // the decoder is left with some cushion.)
+ sp<AMessage> lastDequeueMeta, lastEnqueueMeta;
+ if (delayUs > 0) {
+ lastDequeueMeta = source->getMetaAfterLastDequeued(delayUs);
+ if (lastDequeueMeta == NULL) {
+ // this means we don't have enough cushion, try again later
+ ALOGV("[%s] up switching failed due to insufficient buffer",
+ getNameForStream(stream));
+ return false;
+ }
+ } else {
+ // It's okay for lastDequeueMeta to be NULL here; it means the
+ // decoder hasn't even started dequeueing.
+ lastDequeueMeta = source->getLatestDequeuedMeta();
+ }
+ // Then, trim off packets at the beginning of mPacketSources2 that are
+ // before the latest dequeued time. These samples are definitely too late.
+ firstNewMeta[i] = mPacketSources2.editValueAt(i)
+ ->trimBuffersBeforeMeta(lastDequeueMeta);
+
+ // Now firstNewMeta[i] is the first sample after the trim.
+ // If it's NULL, we failed because the dequeue position is already past
+ // all samples in mPacketSources2; we have to try again.
+ if (firstNewMeta[i] == NULL) {
+ HLSTime dequeueTime(lastDequeueMeta);
+ ALOGV("[%s] dequeue time (%d, %lld) past start time",
+ getNameForStream(stream),
+ dequeueTime.mSeq, (long long) dequeueTime.mTimeUs);
+ return false;
+ }
+
+ // Otherwise, check whether mPacketSources2 overlaps with what the old
+ // fetcher already fetched, and see if we need to resumeUntil.
+ lastEnqueueMeta = source->getLatestEnqueuedMeta();
+ // lastEnqueueMeta == NULL means the old fetcher stopped at a discontinuity
+ // boundary; no need to resume, as the content will look different anyway.
+ if (lastEnqueueMeta != NULL) {
+ HLSTime lastTime(lastEnqueueMeta), startTime(firstNewMeta[i]);
+
+ // No need to resume the old fetcher if the new fetcher started in a
+ // different discontinuity sequence, as the content will look different.
+ *needResumeUntil |= (startTime.mSeq == lastTime.mSeq
+ && startTime.mTimeUs - lastTime.mTimeUs > kResumeThresholdUs);
+
+ // update the stopTime for resumeUntil
+ stopParams->setInt32("discontinuitySeq", startTime.mSeq);
+ stopParams->setInt64(getKeyForStream(stream), startTime.mTimeUs);
+ }
+ }
+
+ // If we're here, it means the dequeue position hasn't passed all samples in
+ // mPacketSources2, so we can trim off the excess in mPacketSources.
+ // (The old fetcher might still need to resumeUntil the start time of the new fetcher.)
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ StreamType stream = indexToType(i);
+ if (!(mSwapMask & mNewStreamMask & stream)
+ || (newUri != mStreams[i].mNewUri)
+ || stream == STREAMTYPE_SUBTITLES) {
+ continue;
+ }
+ mPacketSources.valueFor(stream)->trimBuffersAfterMeta(firstNewMeta[i]);
+ }
+
+ // no resumeUntil if already underflow
+ *needResumeUntil &= !mBuffering;
+
+ return true;
+}
+
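A hedged illustration of the resume-until decision above (the numbers and the kResumeThresholdUs magnitude are assumptions, not from this change):
// old fetcher, last enqueued sample:        (discontinuitySeq 5, 12.0s)
// new fetcher, first sample after trimming: (discontinuitySeq 5, 12.5s)
// Same discontinuity sequence and a 0.5s gap above the assumed threshold, so
// needResumeUntil becomes true and stopParams is set to (5, 12.5s);
// had the new sample landed in sequence 6, no resume would be needed.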
void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatConnect:
@@ -402,16 +503,15 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSeek:
{
- uint32_t seekReplyID;
- CHECK(msg->senderAwaitsResponse(&seekReplyID));
- mSeekReplyID = seekReplyID;
- mSeekReply = new AMessage;
-
- status_t err = onSeek(msg);
-
- if (err != OK) {
+ if (mReconfigurationInProgress) {
msg->post(50000);
+ break;
}
+
+ CHECK(msg->senderAwaitsResponse(&mSeekReplyID));
+ mSeekReply = new AMessage;
+
+ onSeek(msg);
break;
}
@@ -426,16 +526,30 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
case PlaylistFetcher::kWhatPaused:
case PlaylistFetcher::kWhatStopped:
{
- if (what == PlaylistFetcher::kWhatStopped) {
- AString uri;
- CHECK(msg->findString("uri", &uri));
- if (mFetcherInfos.removeItem(uri) < 0) {
- // ignore duplicated kWhatStopped messages.
- break;
- }
+ AString uri;
+ CHECK(msg->findString("uri", &uri));
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index < 0) {
+ // ignore msgs from fetchers that are already gone
+ break;
+ }
- if (mSwitchInProgress) {
- tryToFinishBandwidthSwitch();
+ ALOGV("fetcher-%d %s",
+ mFetcherInfos[index].mFetcher->getFetcherID(),
+ what == PlaylistFetcher::kWhatPaused ?
+ "paused" : "stopped");
+
+ if (what == PlaylistFetcher::kWhatStopped) {
+ mFetcherLooper->unregisterHandler(
+ mFetcherInfos[index].mFetcher->id());
+ mFetcherInfos.removeItemsAt(index);
+ } else if (what == PlaylistFetcher::kWhatPaused) {
+ int32_t seekMode;
+ CHECK(msg->findInt32("seekMode", &seekMode));
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mUri == uri) {
+ mStreams[i].mSeekMode = (SeekMode) seekMode;
+ }
}
}
@@ -443,15 +557,8 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
CHECK_GT(mContinuationCounter, 0);
if (--mContinuationCounter == 0) {
mContinuation->post();
-
- if (mSeekReplyID != 0) {
- CHECK(mSeekReply != NULL);
- mSeekReply->setInt32("err", OK);
- mSeekReply->postReply(mSeekReplyID);
- mSeekReplyID = 0;
- mSeekReply.clear();
- }
}
+ ALOGV("%zu fetcher(s) left", mContinuationCounter);
}
break;
}
@@ -464,8 +571,21 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
int64_t durationUs;
CHECK(msg->findInt64("durationUs", &durationUs));
- FetcherInfo *info = &mFetcherInfos.editValueFor(uri);
- info->mDurationUs = durationUs;
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index >= 0) {
+ FetcherInfo *info = &mFetcherInfos.editValueFor(uri);
+ info->mDurationUs = durationUs;
+ }
+ break;
+ }
+
+ case PlaylistFetcher::kWhatTargetDurationUpdate:
+ {
+ int64_t targetDurationUs;
+ CHECK(msg->findInt64("targetDurationUs", &targetDurationUs));
+ mUpSwitchMark = min(kUpSwitchMarkUs, targetDurationUs * 7 / 4);
+ mDownSwitchMark = min(kDownSwitchMarkUs, targetDurationUs * 9 / 4);
+ mUpSwitchMargin = min(kUpSwitchMarginUs, targetDurationUs);
break;
}
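For concreteness (the target duration below is hypothetical; kUpSwitchMarkUs, kDownSwitchMarkUs and kUpSwitchMarginUs are constants defined elsewhere in the header):
// targetDurationUs = 4s
//   mUpSwitchMark   = min(kUpSwitchMarkUs,   7.0s)   // 4s * 7/4
//   mDownSwitchMark = min(kDownSwitchMarkUs, 9.0s)   // 4s * 9/4
//   mUpSwitchMargin = min(kUpSwitchMarginUs, 4.0s)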
@@ -506,38 +626,23 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
mPacketSources.valueFor(
STREAMTYPE_SUBTITLES)->signalEOS(err);
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatError);
- notify->setInt32("err", err);
- notify->post();
+ postError(err);
break;
}
- case PlaylistFetcher::kWhatTemporarilyDoneFetching:
+ case PlaylistFetcher::kWhatStopReached:
{
- AString uri;
- CHECK(msg->findString("uri", &uri));
+ ALOGV("kWhatStopReached");
- if (mFetcherInfos.indexOfKey(uri) < 0) {
- ALOGE("couldn't find uri");
+ AString oldUri;
+ CHECK(msg->findString("uri", &oldUri));
+
+ ssize_t index = mFetcherInfos.indexOfKey(oldUri);
+ if (index < 0) {
break;
}
- FetcherInfo *info = &mFetcherInfos.editValueFor(uri);
- info->mIsPrepared = true;
-
- if (mInPreparationPhase) {
- bool allFetchersPrepared = true;
- for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- if (!mFetcherInfos.valueAt(i).mIsPrepared) {
- allFetchersPrepared = false;
- break;
- }
- }
- if (allFetchersPrepared) {
- postPrepared(OK);
- }
- }
+ tryToFinishBandwidthSwitch(oldUri);
break;
}
@@ -546,19 +651,80 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
int32_t switchGeneration;
CHECK(msg->findInt32("switchGeneration", &switchGeneration));
+ ALOGV("kWhatStartedAt: switchGen=%d, mSwitchGen=%d",
+ switchGeneration, mSwitchGeneration);
+
if (switchGeneration != mSwitchGeneration) {
break;
}
- // Resume fetcher for the original variant; the resumed fetcher should
- // continue until the timestamps found in msg, which is stored by the
- // new fetcher to indicate where the new variant has started buffering.
- for (size_t i = 0; i < mFetcherInfos.size(); i++) {
- const FetcherInfo info = mFetcherInfos.valueAt(i);
- if (info.mToBeRemoved) {
- info.mFetcher->resumeUntilAsync(msg);
+ AString uri;
+ CHECK(msg->findString("uri", &uri));
+
+ // mark new fetcher mToBeResumed
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index >= 0) {
+ mFetcherInfos.editValueAt(index).mToBeResumed = true;
+ }
+
+ // Temporarily disable the packet sources that are about to be swapped,
+ // to prevent NuPlayerDecoder from dequeuing while we check progress.
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ if ((mSwapMask & mPacketSources.keyAt(i))
+ && uri == mStreams[i].mNewUri) {
+ mPacketSources.editValueAt(i)->enable(false);
+ }
+ }
+ bool switchUp = (mCurBandwidthIndex > mOrigBandwidthIndex);
+ // If switching up, require a cushion bigger than kUnderflowMarkUs
+ // to avoid buffering immediately after the switch.
+ // (If we don't have that cushion we'd rather cancel and try again.)
+ int64_t delayUs = switchUp ? (kUnderflowMarkUs + 1000000ll) : 0;
+ bool needResumeUntil = false;
+ sp<AMessage> stopParams = msg;
+ if (checkSwitchProgress(stopParams, delayUs, &needResumeUntil)) {
+ // playback time hasn't passed startAt time
+ if (!needResumeUntil) {
+ ALOGV("finish switch");
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if ((mSwapMask & indexToType(i))
+ && uri == mStreams[i].mNewUri) {
+ // have to make a copy of mStreams[i].mUri because
+ // tryToFinishBandwidthSwitch is modifying mStreams[]
+ AString oldURI = mStreams[i].mUri;
+ tryToFinishBandwidthSwitch(oldURI);
+ break;
+ }
+ }
+ } else {
+ // startAt time is after last enqueue time
+ // Resume fetcher for the original variant; the resumed fetcher should
+ // continue until the timestamps found in msg, which is stored by the
+ // new fetcher to indicate where the new variant has started buffering.
+ ALOGV("finish switch with resumeUntilAsync");
+ for (size_t i = 0; i < mFetcherInfos.size(); i++) {
+ const FetcherInfo &info = mFetcherInfos.valueAt(i);
+ if (info.mToBeRemoved) {
+ info.mFetcher->resumeUntilAsync(stopParams);
+ }
+ }
+ }
+ } else {
+ // playback time passed startAt time
+ if (switchUp) {
+ // if switching up, cancel, and retry if the condition is satisfied again
+ ALOGV("cancel up switch because we're too late");
+ cancelBandwidthSwitch(true /* resume */);
+ } else {
+ ALOGV("retry down switch at next sample");
+ resumeFetcher(uri, mSwapMask, -1, true /* newUri */);
}
}
+ // re-enable all packet sources
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ mPacketSources.editValueAt(i)->enable(true);
+ }
+
break;
}
@@ -569,19 +735,6 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatCheckBandwidth:
- {
- int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
-
- if (generation != mCheckBandwidthGeneration) {
- break;
- }
-
- onCheckBandwidth(msg);
- break;
- }
-
case kWhatChangeConfiguration:
{
onChangeConfiguration(msg);
@@ -606,21 +759,13 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatSwapped:
+ case kWhatPollBuffering:
{
- onSwapped(msg);
- break;
- }
-
- case kWhatCheckSwitchDown:
- {
- onCheckSwitchDown();
- break;
- }
-
- case kWhatSwitchDown:
- {
- onSwitchDown();
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation == mPollBufferingGeneration) {
+ onPollBuffering();
+ }
break;
}
@@ -691,6 +836,14 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
return;
}
+ // create looper for fetchers
+ if (mFetcherLooper == NULL) {
+ mFetcherLooper = new ALooper();
+
+ mFetcherLooper->setName("Fetcher");
+ mFetcherLooper->start(false, false);
+ }
+
// We trust the content provider to make a reasonable choice of preferred
// initial bandwidth by listing it first in the variant playlist.
// At startup we really don't have a good estimate on the available
@@ -699,7 +852,11 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
size_t initialBandwidth = 0;
size_t initialBandwidthIndex = 0;
+ int32_t maxWidth = 0;
+ int32_t maxHeight = 0;
+
if (mPlaylist->isVariantPlaylist()) {
+ Vector<BandwidthItem> itemsWithVideo;
for (size_t i = 0; i < mPlaylist->size(); ++i) {
BandwidthItem item;
@@ -711,14 +868,30 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
- if (initialBandwidth == 0) {
- initialBandwidth = item.mBandwidth;
+ int32_t width, height;
+ if (meta->findInt32("width", &width)) {
+ maxWidth = max(maxWidth, width);
+ }
+ if (meta->findInt32("height", &height)) {
+ maxHeight = max(maxHeight, height);
}
mBandwidthItems.push(item);
+ if (mPlaylist->hasType(i, "video")) {
+ itemsWithVideo.push(item);
+ }
+ }
+ // remove the audio-only variants if we have at least one with video
+ if (!itemsWithVideo.empty()
+ && itemsWithVideo.size() < mBandwidthItems.size()) {
+ mBandwidthItems.clear();
+ for (size_t i = 0; i < itemsWithVideo.size(); ++i) {
+ mBandwidthItems.push(itemsWithVideo[i]);
+ }
}
CHECK_GT(mBandwidthItems.size(), 0u);
+ initialBandwidth = mBandwidthItems[0].mBandwidth;
mBandwidthItems.sort(SortByBandwidth);
@@ -736,28 +909,29 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
mBandwidthItems.push(item);
}
+ mMaxWidth = maxWidth > 0 ? maxWidth : mMaxWidth;
+ mMaxHeight = maxHeight > 0 ? maxHeight : mMaxHeight;
+
mPlaylist->pickRandomMediaItems();
changeConfiguration(
0ll /* timeUs */, initialBandwidthIndex, false /* pickTrack */);
}
void LiveSession::finishDisconnect() {
+ ALOGV("finishDisconnect");
+
// No reconfiguration is currently pending, make sure none will trigger
// during disconnection either.
- cancelCheckBandwidthEvent();
-
- // Protect mPacketSources from a swapPacketSource race condition through disconnect.
- // (finishDisconnect, onFinishDisconnect2)
cancelBandwidthSwitch();
- // cancel switch down monitor
- mSwitchDownMonitor.clear();
+ // cancel buffer polling
+ cancelPollBuffering();
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
mFetcherInfos.valueAt(i).mFetcher->stopAsync();
}
- sp<AMessage> msg = new AMessage(kWhatFinishDisconnect2, id());
+ sp<AMessage> msg = new AMessage(kWhatFinishDisconnect2, this);
mContinuationCounter = mFetcherInfos.size();
mContinuation = msg;
@@ -780,7 +954,7 @@ void LiveSession::onFinishDisconnect2() {
response->setInt32("err", OK);
response->postReply(mDisconnectReplyID);
- mDisconnectReplyID = 0;
+ mDisconnectReplyID.clear();
}
sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) {
@@ -790,16 +964,17 @@ sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) {
return NULL;
}
- sp<AMessage> notify = new AMessage(kWhatFetcherNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatFetcherNotify, this);
notify->setString("uri", uri);
notify->setInt32("switchGeneration", mSwitchGeneration);
FetcherInfo info;
- info.mFetcher = new PlaylistFetcher(notify, this, uri, mSubtitleGeneration);
+ info.mFetcher = new PlaylistFetcher(
+ notify, this, uri, mCurBandwidthIndex, mSubtitleGeneration);
info.mDurationUs = -1ll;
- info.mIsPrepared = false;
info.mToBeRemoved = false;
- looper()->registerHandler(info.mFetcher);
+ info.mToBeResumed = false;
+ mFetcherLooper->registerHandler(info.mFetcher);
mFetcherInfos.add(uri, info);
@@ -827,14 +1002,15 @@ ssize_t LiveSession::fetchFile(
int64_t range_offset, int64_t range_length,
uint32_t block_size, /* download block size */
sp<DataSource> *source, /* to return and reuse source */
- String8 *actualUrl) {
+ String8 *actualUrl,
+ bool forceConnectHTTP /* force connect HTTP when reusing source */) {
off64_t size;
sp<DataSource> temp_source;
if (source == NULL) {
source = &temp_source;
}
- if (*source == NULL) {
+ if (*source == NULL || forceConnectHTTP) {
if (!strncasecmp(url, "file://", 7)) {
*source = new FileSource(url + 7);
} else if (strncasecmp(url, "http://", 7)
@@ -853,13 +1029,18 @@ ssize_t LiveSession::fetchFile(
? "" : AStringPrintf("%lld",
range_offset + range_length - 1).c_str()).c_str()));
}
- status_t err = mHTTPDataSource->connect(url, &headers);
+
+ HTTPBase* httpDataSource =
+ (*source == NULL) ? mHTTPDataSource.get() : (HTTPBase*)source->get();
+ status_t err = httpDataSource->connect(url, &headers);
if (err != OK) {
return err;
}
- *source = mHTTPDataSource;
+ if (*source == NULL) {
+ *source = mHTTPDataSource;
+ }
}
}
@@ -949,6 +1130,9 @@ sp<M3UParser> LiveSession::fetchPlaylist(
String8 actualUrl;
ssize_t err = fetchFile(url, &buffer, 0, -1, 0, NULL, &actualUrl);
+ // close off the connection after use
+ mHTTPDataSource->disconnect();
+
if (err <= 0) {
return NULL;
}
@@ -995,8 +1179,112 @@ static double uniformRand() {
}
#endif
-size_t LiveSession::getBandwidthIndex() {
- if (mBandwidthItems.size() == 0) {
+bool LiveSession::resumeFetcher(
+ const AString &uri, uint32_t streamMask, int64_t timeUs, bool newUri) {
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index < 0) {
+ ALOGE("did not find fetcher for uri: %s", uri.c_str());
+ return false;
+ }
+
+ bool resume = false;
+ sp<AnotherPacketSource> sources[kMaxStreams];
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if ((streamMask & indexToType(i))
+ && ((!newUri && uri == mStreams[i].mUri)
+ || (newUri && uri == mStreams[i].mNewUri))) {
+ resume = true;
+ if (newUri) {
+ sources[i] = mPacketSources2.valueFor(indexToType(i));
+ sources[i]->clear();
+ } else {
+ sources[i] = mPacketSources.valueFor(indexToType(i));
+ }
+ }
+ }
+
+ if (resume) {
+ sp<PlaylistFetcher> &fetcher = mFetcherInfos.editValueAt(index).mFetcher;
+ SeekMode seekMode = newUri ? kSeekModeNextSample : kSeekModeExactPosition;
+
+ ALOGV("resuming fetcher-%d, timeUs=%lld, seekMode=%d",
+ fetcher->getFetcherID(), (long long)timeUs, seekMode);
+
+ fetcher->startAsync(
+ sources[kAudioIndex],
+ sources[kVideoIndex],
+ sources[kSubtitleIndex],
+ timeUs, -1, -1, seekMode);
+ }
+
+ return resume;
+}
+
+float LiveSession::getAbortThreshold(
+ ssize_t currentBWIndex, ssize_t targetBWIndex) const {
+ float abortThreshold = -1.0f;
+ if (currentBWIndex > 0 && targetBWIndex < currentBWIndex) {
+ /*
+ If we're switching down, we need to decide whether to
+
+ 1) finish last segment of high-bandwidth variant, or
+ 2) abort last segment of high-bandwidth variant, and fetch an
+ overlapping portion from low-bandwidth variant.
+
+ Here we try to maximize the amount of buffer left when the
+ switch point is met. Given the following parameters:
+
+ B: our current buffering level in seconds
+ T: target duration in seconds
+ X: duration in seconds of the samples remaining to fetch in the last segment
+ bw0: bandwidth of old variant (as specified in playlist)
+ bw1: bandwidth of new variant (as specified in playlist)
+ bw: measured bandwidth available
+
+ If we choose 1), when the switch happens at the end of the current
+ segment, our buffering will be
+ B + X - X * bw0 / bw
+
+ If we choose 2), when the switch happens where we aborted the current
+ segment, our buffering will be
+ B - (T - X) * bw1 / bw
+
+ We should only choose 1) if
+ X/T < bw1 / (bw1 + bw0 - bw)
+ */
+
+ // Take the measured current bandwidth at only 50% face value, as our
+ // bandwidth estimate is a lagging indicator. Being conservative here,
+ // we prefer switching to the lower bandwidth unless we're really
+ // confident that finishing up the last segment of the higher bandwidth
+ // will be fast.
+ CHECK(mLastBandwidthBps >= 0);
+ abortThreshold =
+ (float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth
+ / ((float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth
+ + (float)mBandwidthItems.itemAt(currentBWIndex).mBandwidth
+ - (float)mLastBandwidthBps * 0.5f);
+ if (abortThreshold < 0.0f) {
+ abortThreshold = -1.0f; // do not abort
+ }
+ ALOGV("Switching Down: bps %ld => %ld, measured %d, abort ratio %.2f",
+ mBandwidthItems.itemAt(currentBWIndex).mBandwidth,
+ mBandwidthItems.itemAt(targetBWIndex).mBandwidth,
+ mLastBandwidthBps,
+ abortThreshold);
+ }
+ return abortThreshold;
+}
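A worked instance of the abort-ratio formula above, with purely hypothetical numbers (not part of this change):
// bw0 = 2 Mbps (current, higher variant), bw1 = 1 Mbps (target, lower variant),
// measured bw = 1.2 Mbps, taken at 50% face value => 0.6 Mbps effective.
//     abortThreshold = 1 / (1 + 2 - 0.6) = 1 / 2.4 ~= 0.42
// If more than ~42% of the last segment is still left to fetch, pauseAsync()
// aborts it and the lower variant re-fetches the overlap; otherwise the
// higher-bandwidth segment is allowed to finish.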
+
+void LiveSession::addBandwidthMeasurement(size_t numBytes, int64_t delayUs) {
+ mBandwidthEstimator->addBandwidthMeasurement(numBytes, delayUs);
+}
+
+size_t LiveSession::getBandwidthIndex(int32_t bandwidthBps) {
+ if (mBandwidthItems.size() < 2) {
+ // shouldn't be here if we only have 1 bandwidth, check
+ // logic to get rid of redundant bandwidth polling
+ ALOGW("getBandwidthIndex() called for single bandwidth playlist!");
return 0;
}
@@ -1014,15 +1302,6 @@ size_t LiveSession::getBandwidthIndex() {
}
if (index < 0) {
- int32_t bandwidthBps;
- if (mHTTPDataSource != NULL
- && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) {
- ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
- } else {
- ALOGV("no bandwidth estimate.");
- return 0; // Pick the lowest bandwidth stream by default.
- }
-
char value[PROPERTY_VALUE_MAX];
if (property_get("media.httplive.max-bw", value, NULL)) {
char *end;
@@ -1039,15 +1318,9 @@ size_t LiveSession::getBandwidthIndex() {
index = mBandwidthItems.size() - 1;
while (index > 0) {
- // consider only 80% of the available bandwidth, but if we are switching up,
- // be even more conservative (70%) to avoid overestimating and immediately
- // switching back.
- size_t adjustedBandwidthBps = bandwidthBps;
- if (index > mCurBandwidthIndex) {
- adjustedBandwidthBps = adjustedBandwidthBps * 7 / 10;
- } else {
- adjustedBandwidthBps = adjustedBandwidthBps * 8 / 10;
- }
+ // be conservative (70%) to avoid overestimating and immediately
+ // switching down again.
+ size_t adjustedBandwidthBps = bandwidthBps * 7 / 10;
if (mBandwidthItems.itemAt(index).mBandwidth <= adjustedBandwidthBps) {
break;
}
@@ -1107,34 +1380,20 @@ size_t LiveSession::getBandwidthIndex() {
return index;
}
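A minimal sketch of the 70% selection rule above (hypothetical variant list, and assuming no media.httplive.max-bw property cap is set):
// Variants sorted by bandwidth: 500 kbps, 1 Mbps, 2 Mbps.
// Measured bandwidth 2 Mbps -> adjusted budget = 2 Mbps * 7 / 10 = 1.4 Mbps.
// Walking down from the highest index, the first variant whose declared
// bandwidth fits the budget is the 1 Mbps one, so the session deliberately
// stays a notch below the raw measurement to avoid switching right back down.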
-int64_t LiveSession::latestMediaSegmentStartTimeUs() {
- sp<AMessage> audioMeta = mPacketSources.valueFor(STREAMTYPE_AUDIO)->getLatestDequeuedMeta();
- int64_t minSegmentStartTimeUs = -1, videoSegmentStartTimeUs = -1;
- if (audioMeta != NULL) {
- audioMeta->findInt64("segmentStartTimeUs", &minSegmentStartTimeUs);
- }
+HLSTime LiveSession::latestMediaSegmentStartTime() const {
+ HLSTime audioTime(mPacketSources.valueFor(
+ STREAMTYPE_AUDIO)->getLatestDequeuedMeta());
- sp<AMessage> videoMeta = mPacketSources.valueFor(STREAMTYPE_VIDEO)->getLatestDequeuedMeta();
- if (videoMeta != NULL
- && videoMeta->findInt64("segmentStartTimeUs", &videoSegmentStartTimeUs)) {
- if (minSegmentStartTimeUs < 0 || videoSegmentStartTimeUs < minSegmentStartTimeUs) {
- minSegmentStartTimeUs = videoSegmentStartTimeUs;
- }
+ HLSTime videoTime(mPacketSources.valueFor(
+ STREAMTYPE_VIDEO)->getLatestDequeuedMeta());
- }
- return minSegmentStartTimeUs;
+ return audioTime < videoTime ? videoTime : audioTime;
}
-status_t LiveSession::onSeek(const sp<AMessage> &msg) {
+void LiveSession::onSeek(const sp<AMessage> &msg) {
int64_t timeUs;
CHECK(msg->findInt64("timeUs", &timeUs));
-
- if (!mReconfigurationInProgress) {
- changeConfiguration(timeUs, mCurBandwidthIndex);
- return OK;
- } else {
- return -EWOULDBLOCK;
- }
+ changeConfiguration(timeUs);
}
status_t LiveSession::getDuration(int64_t *durationUs) const {
@@ -1182,11 +1441,13 @@ status_t LiveSession::selectTrack(size_t index, bool select) {
return INVALID_OPERATION;
}
+ ALOGV("selectTrack: index=%zu, select=%d, mSubtitleGen=%d++",
+ index, select, mSubtitleGeneration);
+
++mSubtitleGeneration;
status_t err = mPlaylist->selectTrack(index, select);
if (err == OK) {
- sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, id());
- msg->setInt32("bandwidthIndex", mCurBandwidthIndex);
+ sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, this);
msg->setInt32("pickTrack", select);
msg->post();
}
@@ -1201,35 +1462,25 @@ ssize_t LiveSession::getSelectedTrack(media_track_type type) const {
}
}
-bool LiveSession::canSwitchUp() {
- // Allow upwards bandwidth switch when a stream has buffered at least 10 seconds.
- status_t err = OK;
- for (size_t i = 0; i < mPacketSources.size(); ++i) {
- sp<AnotherPacketSource> source = mPacketSources.valueAt(i);
- int64_t dur = source->getBufferedDurationUs(&err);
- if (err == OK && dur > 10000000) {
- return true;
- }
- }
- return false;
-}
-
void LiveSession::changeConfiguration(
- int64_t timeUs, size_t bandwidthIndex, bool pickTrack) {
- // Protect mPacketSources from a swapPacketSource race condition through reconfiguration.
- // (changeConfiguration, onChangeConfiguration2, onChangeConfiguration3).
+ int64_t timeUs, ssize_t bandwidthIndex, bool pickTrack) {
+ ALOGV("changeConfiguration: timeUs=%lld us, bwIndex=%zd, pickTrack=%d",
+ (long long)timeUs, bandwidthIndex, pickTrack);
+
cancelBandwidthSwitch();
CHECK(!mReconfigurationInProgress);
mReconfigurationInProgress = true;
-
- mCurBandwidthIndex = bandwidthIndex;
-
- ALOGV("changeConfiguration => timeUs:%" PRId64 " us, bwIndex:%zu, pickTrack:%d",
- timeUs, bandwidthIndex, pickTrack);
-
- CHECK_LT(bandwidthIndex, mBandwidthItems.size());
- const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex);
+ if (bandwidthIndex >= 0) {
+ mOrigBandwidthIndex = mCurBandwidthIndex;
+ mCurBandwidthIndex = bandwidthIndex;
+ if (mOrigBandwidthIndex != mCurBandwidthIndex) {
+ ALOGI("#### Starting Bandwidth Switch: %zd => %zd",
+ mOrigBandwidthIndex, mCurBandwidthIndex);
+ }
+ }
+ CHECK_LT(mCurBandwidthIndex, mBandwidthItems.size());
+ const BandwidthItem &item = mBandwidthItems.itemAt(mCurBandwidthIndex);
uint32_t streamMask = 0; // streams that should be fetched by the new fetcher
uint32_t resumeMask = 0; // streams that should be fetched by the original fetcher
@@ -1244,38 +1495,59 @@ void LiveSession::changeConfiguration(
// Step 1, stop and discard fetchers that are no longer needed.
// Pause those that we'll reuse.
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- const AString &uri = mFetcherInfos.keyAt(i);
-
- bool discardFetcher = true;
+ // skip fetchers that are marked mToBeRemoved,
+ // these are done and can't be reused
+ if (mFetcherInfos[i].mToBeRemoved) {
+ continue;
+ }
- // If we're seeking all current fetchers are discarded.
- if (timeUs < 0ll) {
- // delay fetcher removal if not picking tracks
- discardFetcher = pickTrack;
+ const AString &uri = mFetcherInfos.keyAt(i);
+ sp<PlaylistFetcher> &fetcher = mFetcherInfos.editValueAt(i).mFetcher;
- for (size_t j = 0; j < kMaxStreams; ++j) {
- StreamType type = indexToType(j);
- if ((streamMask & type) && uri == URIs[j]) {
- resumeMask |= type;
- streamMask &= ~type;
- discardFetcher = false;
- }
+ bool discardFetcher = true, delayRemoval = false;
+ for (size_t j = 0; j < kMaxStreams; ++j) {
+ StreamType type = indexToType(j);
+ if ((streamMask & type) && uri == URIs[j]) {
+ resumeMask |= type;
+ streamMask &= ~type;
+ discardFetcher = false;
}
}
+ // Delay fetcher removal if not picking tracks AND the old fetcher's
+ // stream mask overlaps the new variant. (It's okay to discard the
+ // old fetcher now if there is no overlap at all.)
+ if (discardFetcher && timeUs < 0ll && !pickTrack
+ && (fetcher->getStreamTypeMask() & streamMask)) {
+ discardFetcher = false;
+ delayRemoval = true;
+ }
if (discardFetcher) {
- mFetcherInfos.valueAt(i).mFetcher->stopAsync();
+ ALOGV("discarding fetcher-%d", fetcher->getFetcherID());
+ fetcher->stopAsync();
} else {
- mFetcherInfos.valueAt(i).mFetcher->pauseAsync();
+ float threshold = -1.0f; // always finish fetching by default
+ if (timeUs >= 0ll) {
+ // seeking, no need to finish fetching
+ threshold = 0.0f;
+ } else if (delayRemoval) {
+ // adapting, abort if the remainder of the current segment exceeds the threshold
+ threshold = getAbortThreshold(
+ mOrigBandwidthIndex, mCurBandwidthIndex);
+ }
+
+ ALOGV("pausing fetcher-%d, threshold=%.2f",
+ fetcher->getFetcherID(), threshold);
+ fetcher->pauseAsync(threshold);
}
}
sp<AMessage> msg;
if (timeUs < 0ll) {
// skip onChangeConfiguration2 (decoder destruction) if not seeking.
- msg = new AMessage(kWhatChangeConfiguration3, id());
+ msg = new AMessage(kWhatChangeConfiguration3, this);
} else {
- msg = new AMessage(kWhatChangeConfiguration2, id());
+ msg = new AMessage(kWhatChangeConfiguration2, this);
}
msg->setInt32("streamMask", streamMask);
msg->setInt32("resumeMask", resumeMask);
@@ -1296,40 +1568,65 @@ void LiveSession::changeConfiguration(
if (mContinuationCounter == 0) {
msg->post();
-
- if (mSeekReplyID != 0) {
- CHECK(mSeekReply != NULL);
- mSeekReply->setInt32("err", OK);
- mSeekReply->postReply(mSeekReplyID);
- mSeekReplyID = 0;
- mSeekReply.clear();
- }
}
}
void LiveSession::onChangeConfiguration(const sp<AMessage> &msg) {
+ ALOGV("onChangeConfiguration");
+
if (!mReconfigurationInProgress) {
- int32_t pickTrack = 0, bandwidthIndex = mCurBandwidthIndex;
+ int32_t pickTrack = 0;
msg->findInt32("pickTrack", &pickTrack);
- msg->findInt32("bandwidthIndex", &bandwidthIndex);
- changeConfiguration(-1ll /* timeUs */, bandwidthIndex, pickTrack);
+ changeConfiguration(-1ll /* timeUs */, -1, pickTrack);
} else {
msg->post(1000000ll); // retry in 1 sec
}
}
void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
+ ALOGV("onChangeConfiguration2");
+
mContinuation.clear();
// All fetchers are either suspended or have been removed now.
+ // If we're seeking, clear all packet sources before we report
+ // seek complete, to prevent decoder from pulling stale data.
+ int64_t timeUs;
+ CHECK(msg->findInt64("timeUs", &timeUs));
+
+ if (timeUs >= 0) {
+ mLastSeekTimeUs = timeUs;
+ mLastDequeuedTimeUs = timeUs;
+
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ mPacketSources.editValueAt(i)->clear();
+ }
+
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ mStreams[i].mCurDiscontinuitySeq = 0;
+ }
+
+ mDiscontinuityOffsetTimesUs.clear();
+ mDiscontinuityAbsStartTimesUs.clear();
+
+ if (mSeekReplyID != NULL) {
+ CHECK(mSeekReply != NULL);
+ mSeekReply->setInt32("err", OK);
+ mSeekReply->postReply(mSeekReplyID);
+ mSeekReplyID.clear();
+ mSeekReply.clear();
+ }
+
+ // restart buffer polling after seek because the previous
+ // buffering position is no longer valid.
+ restartPollBuffering();
+ }
+
uint32_t streamMask, resumeMask;
CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask));
CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask));
- // currently onChangeConfiguration2 is only called for seeking;
- // remove the following CHECK if using it else where.
- CHECK_EQ(resumeMask, 0);
streamMask |= resumeMask;
AString URIs[kMaxStreams];
@@ -1341,17 +1638,27 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
}
}
- // Determine which decoders to shutdown on the player side,
- // a decoder has to be shutdown if either
- // 1) its streamtype was active before but now longer isn't.
- // or
- // 2) its streamtype was already active and still is but the URI
- // has changed.
uint32_t changedMask = 0;
for (size_t i = 0; i < kMaxStreams && i != kSubtitleIndex; ++i) {
- if (((mStreamMask & streamMask & indexToType(i))
- && !(URIs[i] == mStreams[i].mUri))
- || (mStreamMask & ~streamMask & indexToType(i))) {
+ // The stream URI could change even though onChangeConfiguration2 is only
+ // used for seeking. A seek could happen during a bandwidth switch; in that
+ // case the switch is cancelled, but the seek position will be fetched
+ // from the new URI.
+ if ((mStreamMask & streamMask & indexToType(i))
+ && !mStreams[i].mUri.empty()
+ && !(URIs[i] == mStreams[i].mUri)) {
+ ALOGV("stream %zu changed: oldURI %s, newURI %s", i,
+ mStreams[i].mUri.c_str(), URIs[i].c_str());
+ sp<AnotherPacketSource> source = mPacketSources.valueFor(indexToType(i));
+ if (source->getLatestDequeuedMeta() != NULL) {
+ source->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true);
+ }
+ }
+ // Determine which decoders to shut down on the player side:
+ // a decoder has to be shut down if its stream type was active
+ // before but no longer is.
+ if ((mStreamMask & ~streamMask & indexToType(i))) {
changedMask |= indexToType(i);
}
}
@@ -1372,7 +1679,7 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
notify->setInt32("changedMask", changedMask);
msg->setWhat(kWhatChangeConfiguration3);
- msg->setTarget(id());
+ msg->setTarget(this);
notify->setMessage("reply", msg);
notify->post();
@@ -1387,6 +1694,8 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask));
CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask));
+ mNewStreamMask = streamMask | resumeMask;
+
int64_t timeUs;
int32_t pickTrack;
bool switching = false;
@@ -1395,13 +1704,26 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
if (timeUs < 0ll) {
if (!pickTrack) {
- switching = true;
+ // mSwapMask contains streams that are in both the old and the new variant
+ // (in mNewStreamMask & mStreamMask) but with different URIs
+ // (not in resumeMask).
+ // For example, the old variant has video and audio in two separate
+ // URIs, and the new variant has only audio with an unchanged URI. mSwapMask
+ // should then be 0 as there is nothing to swap; we only need to stop video
+ // and resume audio.
+ mSwapMask = mNewStreamMask & mStreamMask & ~resumeMask;
+ switching = (mSwapMask != 0);
}
mRealTimeBaseUs = ALooper::GetNowUs() - mLastDequeuedTimeUs;
} else {
mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
}
+ ALOGV("onChangeConfiguration3: timeUs=%lld, switching=%d, pickTrack=%d, "
+ "mStreamMask=0x%x, mNewStreamMask=0x%x, mSwapMask=0x%x",
+ (long long)timeUs, switching, pickTrack,
+ mStreamMask, mNewStreamMask, mSwapMask);
+
for (size_t i = 0; i < kMaxStreams; ++i) {
if (streamMask & indexToType(i)) {
if (switching) {
@@ -1412,47 +1734,21 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
}
}
- mNewStreamMask = streamMask | resumeMask;
- if (switching) {
- mSwapMask = mStreamMask & ~resumeMask;
- }
-
// Of all existing fetchers:
// * Resume fetchers that are still needed and assign them original packet sources.
// * Mark otherwise unneeded fetchers for removal.
ALOGV("resuming fetchers for mask 0x%08x", resumeMask);
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
const AString &uri = mFetcherInfos.keyAt(i);
+ if (!resumeFetcher(uri, resumeMask, timeUs)) {
+ ALOGV("marking fetcher-%d to be removed",
+ mFetcherInfos[i].mFetcher->getFetcherID());
- sp<AnotherPacketSource> sources[kMaxStreams];
- for (size_t j = 0; j < kMaxStreams; ++j) {
- if ((resumeMask & indexToType(j)) && uri == mStreams[j].mUri) {
- sources[j] = mPacketSources.valueFor(indexToType(j));
-
- if (j != kSubtitleIndex) {
- ALOGV("queueing dummy discontinuity for stream type %d", indexToType(j));
- sp<AnotherPacketSource> discontinuityQueue;
- discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
- discontinuityQueue->queueDiscontinuity(
- ATSParser::DISCONTINUITY_NONE,
- NULL,
- true);
- }
- }
- }
-
- FetcherInfo &info = mFetcherInfos.editValueAt(i);
- if (sources[kAudioIndex] != NULL || sources[kVideoIndex] != NULL
- || sources[kSubtitleIndex] != NULL) {
- info.mFetcher->startAsync(
- sources[kAudioIndex], sources[kVideoIndex], sources[kSubtitleIndex]);
- } else {
- info.mToBeRemoved = true;
+ mFetcherInfos.editValueAt(i).mToBeRemoved = true;
}
}
// streamMask now only contains the types that need a new fetcher created.
-
if (streamMask != 0) {
ALOGV("creating new fetchers for mask 0x%08x", streamMask);
}
@@ -1470,13 +1766,12 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
sp<PlaylistFetcher> fetcher = addFetcher(uri.c_str());
CHECK(fetcher != NULL);
- int64_t startTimeUs = -1;
- int64_t segmentStartTimeUs = -1ll;
- int32_t discontinuitySeq = -1;
+ HLSTime startTime;
+ SeekMode seekMode = kSeekModeExactPosition;
sp<AnotherPacketSource> sources[kMaxStreams];
- if (i == kSubtitleIndex) {
- segmentStartTimeUs = latestMediaSegmentStartTimeUs();
+ if (i == kSubtitleIndex || (!pickTrack && !switching)) {
+ startTime = latestMediaSegmentStartTime();
}
// TRICKY: looping from i as earlier streams are already removed from streamMask
@@ -1486,63 +1781,50 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
sources[j] = mPacketSources.valueFor(indexToType(j));
if (timeUs >= 0) {
- sources[j]->clear();
- startTimeUs = timeUs;
-
- sp<AnotherPacketSource> discontinuityQueue;
- sp<AMessage> extra = new AMessage;
- extra->setInt64("timeUs", timeUs);
- discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
- discontinuityQueue->queueDiscontinuity(
- ATSParser::DISCONTINUITY_TIME, extra, true);
+ startTime.mTimeUs = timeUs;
} else {
int32_t type;
sp<AMessage> meta;
- if (pickTrack) {
- // selecting
+ if (!switching) {
+ // selecting, or adapting but no swap required
meta = sources[j]->getLatestDequeuedMeta();
} else {
- // adapting
+ // adapting and swap required
meta = sources[j]->getLatestEnqueuedMeta();
- }
-
- if (meta != NULL && !meta->findInt32("discontinuity", &type)) {
- int64_t tmpUs;
- int64_t tmpSegmentUs;
-
- CHECK(meta->findInt64("timeUs", &tmpUs));
- CHECK(meta->findInt64("segmentStartTimeUs", &tmpSegmentUs));
- if (startTimeUs < 0 || tmpSegmentUs < segmentStartTimeUs) {
- startTimeUs = tmpUs;
- segmentStartTimeUs = tmpSegmentUs;
- } else if (tmpSegmentUs == segmentStartTimeUs && tmpUs < startTimeUs) {
- startTimeUs = tmpUs;
+ if (meta != NULL && mCurBandwidthIndex > mOrigBandwidthIndex) {
+ // switching up
+ meta = sources[j]->getMetaAfterLastDequeued(mUpSwitchMargin);
}
+ }
- int32_t seq;
- CHECK(meta->findInt32("discontinuitySeq", &seq));
- if (discontinuitySeq < 0 || seq < discontinuitySeq) {
- discontinuitySeq = seq;
+ if (j != kSubtitleIndex && meta != NULL
+ && !meta->findInt32("discontinuity", &type)) {
+ HLSTime tmpTime(meta);
+ if (startTime < tmpTime) {
+ startTime = tmpTime;
}
}
- if (pickTrack) {
- // selecting track, queue discontinuities before content
+ if (!switching) {
+ // selecting, or adapting but no swap required
sources[j]->clear();
if (j == kSubtitleIndex) {
break;
}
- sp<AnotherPacketSource> discontinuityQueue;
- discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
- discontinuityQueue->queueDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true);
+
+ ALOGV("stream[%zu]: queue format change", j);
+ sources[j]->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, true);
} else {
- // adapting, queue discontinuities after resume
+ // switching, queue discontinuities after resume
sources[j] = mPacketSources2.valueFor(indexToType(j));
sources[j]->clear();
- uint32_t extraStreams = mNewStreamMask & (~mStreamMask);
- if (extraStreams & indexToType(j)) {
- sources[j]->queueAccessUnit(createFormatChangeBuffer(/*swap*/ false));
+ // The new fetcher might be providing streams that used to be
+ // provided by two different fetchers. If one of the fetchers
+ // paused in the middle of a segment while the other paused at the
+ // next segment, we have to start from the next segment.
+ if (seekMode < mStreams[j].mSeekMode) {
+ seekMode = mStreams[j].mSeekMode;
}
}
}
@@ -1551,54 +1833,103 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
}
}
+ ALOGV("[fetcher-%d] startAsync: startTimeUs %lld mLastSeekTimeUs %lld "
+ "segmentStartTimeUs %lld seekMode %d",
+ fetcher->getFetcherID(),
+ (long long)startTime.mTimeUs,
+ (long long)mLastSeekTimeUs,
+ (long long)startTime.getSegmentTimeUs(true /* midpoint */),
+ seekMode);
+
+ // Set the target segment start time to the midpoint of the
+ // segment where the last sample was.
+ // This gives a better guess if segments of the two variants are not
+ // perfectly aligned. (If the corresponding segment in the new variant
+ // starts slightly later than the one in the old variant, we still want
+ // to pick that segment, not the one before.)
fetcher->startAsync(
sources[kAudioIndex],
sources[kVideoIndex],
sources[kSubtitleIndex],
- startTimeUs < 0 ? mLastSeekTimeUs : startTimeUs,
- segmentStartTimeUs,
- discontinuitySeq,
- switching);
+ startTime.mTimeUs < 0 ? mLastSeekTimeUs : startTime.mTimeUs,
+ startTime.getSegmentTimeUs(true /* midpoint */),
+ startTime.mSeq,
+ seekMode);
}
// All fetchers have now been started, the configuration change
// has completed.
- cancelCheckBandwidthEvent();
- scheduleCheckBandwidthEvent();
-
- ALOGV("XXX configuration change completed.");
mReconfigurationInProgress = false;
if (switching) {
mSwitchInProgress = true;
} else {
mStreamMask = mNewStreamMask;
+ if (mOrigBandwidthIndex != mCurBandwidthIndex) {
+ ALOGV("#### Finished Bandwidth Switch Early: %zd => %zd",
+ mOrigBandwidthIndex, mCurBandwidthIndex);
+ mOrigBandwidthIndex = mCurBandwidthIndex;
+ }
}
- if (mDisconnectReplyID != 0) {
+ ALOGV("onChangeConfiguration3: mSwitchInProgress %d, mStreamMask 0x%x",
+ mSwitchInProgress, mStreamMask);
+
+ if (mDisconnectReplyID != NULL) {
finishDisconnect();
}
}
-void LiveSession::onSwapped(const sp<AMessage> &msg) {
- int32_t switchGeneration;
- CHECK(msg->findInt32("switchGeneration", &switchGeneration));
- if (switchGeneration != mSwitchGeneration) {
+void LiveSession::swapPacketSource(StreamType stream) {
+ ALOGV("[%s] swapPacketSource", getNameForStream(stream));
+
+ // transfer packets from source2 to source
+ sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream);
+ sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream);
+
+ // queue discontinuity in mPacketSource
+ aps->queueDiscontinuity(ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, false);
+
+ // queue packets in mPacketSource2 to mPacketSource
+ status_t finalResult = OK;
+ sp<ABuffer> accessUnit;
+ while (aps2->hasBufferAvailable(&finalResult) && finalResult == OK &&
+ OK == aps2->dequeueAccessUnit(&accessUnit)) {
+ aps->queueAccessUnit(accessUnit);
+ }
+ aps2->clear();
+}
+
+void LiveSession::tryToFinishBandwidthSwitch(const AString &oldUri) {
+ if (!mSwitchInProgress) {
return;
}
- int32_t stream;
- CHECK(msg->findInt32("stream", &stream));
+ ssize_t index = mFetcherInfos.indexOfKey(oldUri);
+ if (index < 0 || !mFetcherInfos[index].mToBeRemoved) {
+ return;
+ }
+
+ // Swap packet source of streams provided by old variant
+ for (size_t idx = 0; idx < kMaxStreams; idx++) {
+ StreamType stream = indexToType(idx);
+ if ((mSwapMask & stream) && (oldUri == mStreams[idx].mUri)) {
+ swapPacketSource(stream);
+
+ if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) {
+ ALOGW("swapping stream type %d %s to empty stream",
+ stream, mStreams[idx].mUri.c_str());
+ }
+ mStreams[idx].mUri = mStreams[idx].mNewUri;
+ mStreams[idx].mNewUri.clear();
- ssize_t idx = typeToIndex(stream);
- CHECK(idx >= 0);
- if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) {
- ALOGW("swapping stream type %d %s to empty stream", stream, mStreams[idx].mUri.c_str());
+ mSwapMask &= ~stream;
+ }
}
- mStreams[idx].mUri = mStreams[idx].mNewUri;
- mStreams[idx].mNewUri.clear();
- mSwapMask &= ~stream;
+ mFetcherInfos.editValueAt(index).mFetcher->stopAsync(false /* clear */);
+
+ ALOGV("tryToFinishBandwidthSwitch: mSwapMask=0x%x", mSwapMask);
if (mSwapMask != 0) {
return;
}
@@ -1606,155 +1937,322 @@ void LiveSession::onSwapped(const sp<AMessage> &msg) {
// Check if new variant contains extra streams.
uint32_t extraStreams = mNewStreamMask & (~mStreamMask);
while (extraStreams) {
- StreamType extraStream = (StreamType) (extraStreams & ~(extraStreams - 1));
- swapPacketSource(extraStream);
- extraStreams &= ~extraStream;
+ StreamType stream = (StreamType) (extraStreams & ~(extraStreams - 1));
+ extraStreams &= ~stream;
+
+ swapPacketSource(stream);
- idx = typeToIndex(extraStream);
+ ssize_t idx = typeToIndex(stream);
CHECK(idx >= 0);
if (mStreams[idx].mNewUri.empty()) {
ALOGW("swapping extra stream type %d %s to empty stream",
- extraStream, mStreams[idx].mUri.c_str());
+ stream, mStreams[idx].mUri.c_str());
}
mStreams[idx].mUri = mStreams[idx].mNewUri;
mStreams[idx].mNewUri.clear();
}
- tryToFinishBandwidthSwitch();
-}
-
-void LiveSession::onCheckSwitchDown() {
- if (mSwitchDownMonitor == NULL) {
- return;
+ // Restart new fetcher (it was paused after the first 47k block)
+ // and let it fetch into mPacketSources (not mPacketSources2)
+ for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
+ FetcherInfo &info = mFetcherInfos.editValueAt(i);
+ if (info.mToBeResumed) {
+ resumeFetcher(mFetcherInfos.keyAt(i), mNewStreamMask);
+ info.mToBeResumed = false;
+ }
}
- if (mSwitchInProgress || mReconfigurationInProgress) {
- ALOGV("Switch/Reconfig in progress, defer switch down");
- mSwitchDownMonitor->post(1000000ll);
- return;
- }
+ ALOGI("#### Finished Bandwidth Switch: %zd => %zd",
+ mOrigBandwidthIndex, mCurBandwidthIndex);
- for (size_t i = 0; i < kMaxStreams; ++i) {
- int32_t targetDuration;
- sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(indexToType(i));
- sp<AMessage> meta = packetSource->getLatestDequeuedMeta();
+ mStreamMask = mNewStreamMask;
+ mSwitchInProgress = false;
+ mOrigBandwidthIndex = mCurBandwidthIndex;
- if (meta != NULL && meta->findInt32("targetDuration", &targetDuration) ) {
- int64_t bufferedDurationUs = packetSource->getEstimatedDurationUs();
- int64_t targetDurationUs = targetDuration * 1000000ll;
+ restartPollBuffering();
+}
- if (bufferedDurationUs < targetDurationUs / 3) {
- (new AMessage(kWhatSwitchDown, id()))->post();
- break;
- }
- }
- }
+void LiveSession::schedulePollBuffering() {
+ sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+ msg->setInt32("generation", mPollBufferingGeneration);
+ msg->post(1000000ll);
+}
- mSwitchDownMonitor->post(1000000ll);
+void LiveSession::cancelPollBuffering() {
+ ++mPollBufferingGeneration;
+ mPrevBufferPercentage = -1;
}
-void LiveSession::onSwitchDown() {
- if (mReconfigurationInProgress || mSwitchInProgress || mCurBandwidthIndex == 0) {
- return;
- }
+void LiveSession::restartPollBuffering() {
+ cancelPollBuffering();
+ onPollBuffering();
+}
- ssize_t bandwidthIndex = getBandwidthIndex();
- if (bandwidthIndex < mCurBandwidthIndex) {
- changeConfiguration(-1, bandwidthIndex, false);
- return;
+void LiveSession::onPollBuffering() {
+ ALOGV("onPollBuffering: mSwitchInProgress %d, mReconfigurationInProgress %d, "
+ "mInPreparationPhase %d, mCurBandwidthIndex %zd, mStreamMask 0x%x",
+ mSwitchInProgress, mReconfigurationInProgress,
+ mInPreparationPhase, mCurBandwidthIndex, mStreamMask);
+
+ bool underflow, ready, down, up;
+ if (checkBuffering(underflow, ready, down, up)) {
+ if (mInPreparationPhase) {
+ // Allow down switch even if we're still preparing.
+ //
+ // Some streams default to a high bandwidth index; when bandwidth
+ // is low, it takes a long time to buffer to the ready mark, and
+ // playback pauses immediately after start because we have to do
+ // a down switch. It's a better experience to restart from a lower
+ // index if we detect low bandwidth.
+ if (!switchBandwidthIfNeeded(false /* up */, down) && ready) {
+ postPrepared(OK);
+ }
+ }
+
+ if (!mInPreparationPhase) {
+ if (ready) {
+ stopBufferingIfNecessary();
+ } else if (underflow) {
+ startBufferingIfNecessary();
+ }
+ switchBandwidthIfNeeded(up, down);
+ }
}
+ schedulePollBuffering();
}
-// Mark switch done when:
-// 1. all old buffers are swapped out
-void LiveSession::tryToFinishBandwidthSwitch() {
+void LiveSession::cancelBandwidthSwitch(bool resume) {
+ ALOGV("cancelBandwidthSwitch: mSwitchGen(%d)++, orig %zd, cur %zd",
+ mSwitchGeneration, mOrigBandwidthIndex, mCurBandwidthIndex);
if (!mSwitchInProgress) {
return;
}
- bool needToRemoveFetchers = false;
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- if (mFetcherInfos.valueAt(i).mToBeRemoved) {
- needToRemoveFetchers = true;
- break;
+ FetcherInfo& info = mFetcherInfos.editValueAt(i);
+ if (info.mToBeRemoved) {
+ info.mToBeRemoved = false;
+ if (resume) {
+ resumeFetcher(mFetcherInfos.keyAt(i), mSwapMask);
+ }
}
}
- if (!needToRemoveFetchers && mSwapMask == 0) {
- ALOGI("mSwitchInProgress = false");
- mStreamMask = mNewStreamMask;
- mSwitchInProgress = false;
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ AString newUri = mStreams[i].mNewUri;
+ if (!newUri.empty()) {
+ // clear all mNewUri matching this newUri
+ for (size_t j = i; j < kMaxStreams; ++j) {
+ if (mStreams[j].mNewUri == newUri) {
+ mStreams[j].mNewUri.clear();
+ }
+ }
+ ALOGV("stopping newUri = %s", newUri.c_str());
+ ssize_t index = mFetcherInfos.indexOfKey(newUri);
+ if (index < 0) {
+ ALOGE("did not find fetcher for newUri: %s", newUri.c_str());
+ continue;
+ }
+ FetcherInfo &info = mFetcherInfos.editValueAt(index);
+ info.mToBeRemoved = true;
+ info.mFetcher->stopAsync();
+ }
}
-}
-
-void LiveSession::scheduleCheckBandwidthEvent() {
- sp<AMessage> msg = new AMessage(kWhatCheckBandwidth, id());
- msg->setInt32("generation", mCheckBandwidthGeneration);
- msg->post(10000000ll);
-}
-void LiveSession::cancelCheckBandwidthEvent() {
- ++mCheckBandwidthGeneration;
-}
+ ALOGI("#### Canceled Bandwidth Switch: %zd => %zd",
+ mOrigBandwidthIndex, mCurBandwidthIndex);
-void LiveSession::cancelBandwidthSwitch() {
- Mutex::Autolock lock(mSwapMutex);
mSwitchGeneration++;
mSwitchInProgress = false;
+ mCurBandwidthIndex = mOrigBandwidthIndex;
mSwapMask = 0;
+}
- for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- FetcherInfo& info = mFetcherInfos.editValueAt(i);
- if (info.mToBeRemoved) {
- info.mToBeRemoved = false;
- }
+bool LiveSession::checkBuffering(
+ bool &underflow, bool &ready, bool &down, bool &up) {
+ underflow = ready = down = up = false;
+
+ if (mReconfigurationInProgress) {
+ ALOGV("Switch/Reconfig in progress, defer buffer polling");
+ return false;
}
- for (size_t i = 0; i < kMaxStreams; ++i) {
- if (!mStreams[i].mNewUri.empty()) {
- ssize_t j = mFetcherInfos.indexOfKey(mStreams[i].mNewUri);
- if (j < 0) {
- mStreams[i].mNewUri.clear();
- continue;
+ size_t activeCount, underflowCount, readyCount, downCount, upCount;
+ activeCount = underflowCount = readyCount = downCount = upCount = 0;
+ int32_t minBufferPercent = -1;
+ int64_t durationUs;
+ if (getDuration(&durationUs) != OK) {
+ durationUs = -1;
+ }
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ // we don't check subtitles for buffering level
+ if (!(mStreamMask & mPacketSources.keyAt(i)
+ & (STREAMTYPE_AUDIO | STREAMTYPE_VIDEO))) {
+ continue;
+ }
+ // ignore streams that never had any packet queued.
+ // (it's possible that the variant only has audio or video)
+ sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta();
+ if (meta == NULL) {
+ continue;
+ }
+
+ int64_t bufferedDurationUs =
+ mPacketSources[i]->getEstimatedDurationUs();
+ ALOGV("[%s] buffered %lld us",
+ getNameForStream(mPacketSources.keyAt(i)),
+ (long long)bufferedDurationUs);
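+ // Buffer percentage = (last dequeued position + buffered duration) / total
+ // duration; only computed when the total duration is known.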
+ if (durationUs >= 0) {
+ int32_t percent;
+ if (mPacketSources[i]->isFinished(0 /* duration */)) {
+ percent = 100;
+ } else {
+ percent = (int32_t)(100.0 *
+ (mLastDequeuedTimeUs + bufferedDurationUs) / durationUs);
+ }
+ if (minBufferPercent < 0 || percent < minBufferPercent) {
+ minBufferPercent = percent;
}
+ }
- const FetcherInfo &info = mFetcherInfos.valueAt(j);
- info.mFetcher->stopAsync();
- mFetcherInfos.removeItemsAt(j);
- mStreams[i].mNewUri.clear();
+ ++activeCount;
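+ // Tally this stream against the ready/underflow/up/down marks. A finished
+ // stream counts as ready, but never as underflowed or as a switch candidate.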
+ int64_t readyMark = mInPreparationPhase ? kPrepareMarkUs : kReadyMarkUs;
+ if (bufferedDurationUs > readyMark
+ || mPacketSources[i]->isFinished(0)) {
+ ++readyCount;
+ }
+ if (!mPacketSources[i]->isFinished(0)) {
+ if (bufferedDurationUs < kUnderflowMarkUs) {
+ ++underflowCount;
+ }
+ if (bufferedDurationUs > mUpSwitchMark) {
+ ++upCount;
+ }
+ if (bufferedDurationUs < mDownSwitchMark) {
+ ++downCount;
+ }
}
}
-}
-bool LiveSession::canSwitchBandwidthTo(size_t bandwidthIndex) {
- if (mReconfigurationInProgress || mSwitchInProgress) {
- return false;
+ if (minBufferPercent >= 0) {
+ notifyBufferingUpdate(minBufferPercent);
}
- if (mCurBandwidthIndex < 0) {
+ if (activeCount > 0) {
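+ // Only switch up if every active stream is above the up mark; a single
+ // stream below the down/underflow mark is enough to trigger a down switch
+ // or (re)buffering.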
+ up = (upCount == activeCount);
+ down = (downCount > 0);
+ ready = (readyCount == activeCount);
+ underflow = (underflowCount > 0);
return true;
}
- if (bandwidthIndex == (size_t)mCurBandwidthIndex) {
+ return false;
+}
+
+void LiveSession::startBufferingIfNecessary() {
+ ALOGV("startBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d",
+ mInPreparationPhase, mBuffering);
+ if (!mBuffering) {
+ mBuffering = true;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBufferingStart);
+ notify->post();
+ }
+}
+
+void LiveSession::stopBufferingIfNecessary() {
+ ALOGV("stopBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d",
+ mInPreparationPhase, mBuffering);
+
+ if (mBuffering) {
+ mBuffering = false;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBufferingEnd);
+ notify->post();
+ }
+}
+
+void LiveSession::notifyBufferingUpdate(int32_t percentage) {
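+ // Keep the reported percentage monotonically non-decreasing and cap it at 100.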
+ if (percentage < mPrevBufferPercentage) {
+ percentage = mPrevBufferPercentage;
+ } else if (percentage > 100) {
+ percentage = 100;
+ }
+
+ mPrevBufferPercentage = percentage;
+
+ ALOGV("notifyBufferingUpdate: percentage=%d%%", percentage);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBufferingUpdate);
+ notify->setInt32("percentage", percentage);
+ notify->post();
+}
+
+/*
+ * returns true if a bandwidth switch is actually needed (and started),
+ * returns false otherwise
+ */
+bool LiveSession::switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow) {
+ // no need to check bandwidth if we only have one bandwidth setting
+ if (mSwitchInProgress || mBandwidthItems.size() < 2) {
return false;
- } else if (bandwidthIndex > (size_t)mCurBandwidthIndex) {
- return canSwitchUp();
+ }
+
+ int32_t bandwidthBps;
+ if (mBandwidthEstimator->estimateBandwidth(&bandwidthBps)) {
+ ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+ mLastBandwidthBps = bandwidthBps;
} else {
- return true;
+ ALOGV("no bandwidth estimate.");
+ return false;
}
+
+ int32_t curBandwidth = mBandwidthItems.itemAt(mCurBandwidthIndex).mBandwidth;
+ // canSwithDown and canSwitchUp can't both be true.
+ // we only want to switch up when the measured bw is at least 120% of the
+ // current variant (i.e. 20% headroom), and we only want to switch down
+ // when the measured bw is below the current variant.
+ bool canSwithDown = bufferLow
+ && (bandwidthBps < (int32_t)curBandwidth);
+ bool canSwitchUp = bufferHigh
+ && (bandwidthBps > (int32_t)curBandwidth * 12 / 10);
+
+ if (canSwithDown || canSwitchUp) {
+ ssize_t bandwidthIndex = getBandwidthIndex(bandwidthBps);
+
+ // it's possible that we're checking the canSwitchUp case, but the returned
+ // bandwidthIndex is < mCurBandwidthIndex, as getBandwidthIndex() only uses 70%
+ // of the measured bw. In that case we don't want to do anything, since we have
+ // both enough buffer and enough bw.
+ if ((canSwitchUp && bandwidthIndex > mCurBandwidthIndex)
+ || (canSwithDown && bandwidthIndex < mCurBandwidthIndex)) {
+ // if not yet prepared, just restart again with new bw index.
+ // this is faster and playback experience is cleaner.
+ changeConfiguration(
+ mInPreparationPhase ? 0 : -1ll, bandwidthIndex);
+ return true;
+ }
+ }
+ return false;
}
-void LiveSession::onCheckBandwidth(const sp<AMessage> &msg) {
- size_t bandwidthIndex = getBandwidthIndex();
- if (canSwitchBandwidthTo(bandwidthIndex)) {
- changeConfiguration(-1ll /* timeUs */, bandwidthIndex);
- } else {
- // Come back and check again 10 seconds later in case there is nothing to do now.
- // If we DO change configuration, once that completes it'll schedule a new
- // check bandwidth event with an incremented mCheckBandwidthGeneration.
- msg->post(10000000ll);
+void LiveSession::postError(status_t err) {
+ // if we reached EOS, notify buffering of 100%
+ if (err == ERROR_END_OF_STREAM) {
+ notifyBufferingUpdate(100);
}
+ // we're about to stop buffer polling; before that, notify that buffering
+ // has stopped so the spinning icon goes away
+ stopBufferingIfNecessary();
+ cancelPollBuffering();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
}
void LiveSession::postPrepared(status_t err) {
@@ -1764,6 +2262,8 @@ void LiveSession::postPrepared(status_t err) {
if (err == OK || err == ERROR_END_OF_STREAM) {
notify->setInt32("what", kWhatPrepared);
} else {
+ cancelPollBuffering();
+
notify->setInt32("what", kWhatPreparationFailed);
notify->setInt32("err", err);
}
@@ -1771,10 +2271,8 @@ void LiveSession::postPrepared(status_t err) {
notify->post();
mInPreparationPhase = false;
-
- mSwitchDownMonitor = new AMessage(kWhatCheckSwitchDown, id());
- mSwitchDownMonitor->post();
}
+
} // namespace android
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 2d3a25a..ed74bc2 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -26,6 +26,7 @@
namespace android {
struct ABuffer;
+struct AReplyToken;
struct AnotherPacketSource;
struct DataSource;
struct HTTPBase;
@@ -33,16 +34,13 @@ struct IMediaHTTPService;
struct LiveDataSource;
struct M3UParser;
struct PlaylistFetcher;
+struct HLSTime;
struct LiveSession : public AHandler {
enum Flags {
// Don't log any URLs.
kFlagIncognito = 1,
};
- LiveSession(
- const sp<AMessage> &notify,
- uint32_t flags,
- const sp<IMediaHTTPService> &httpService);
enum StreamIndex {
kAudioIndex = 0,
@@ -56,10 +54,24 @@ struct LiveSession : public AHandler {
STREAMTYPE_VIDEO = 1 << kVideoIndex,
STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex,
};
+
+ enum SeekMode {
+ kSeekModeExactPosition = 0, // used for seeking
+ kSeekModeNextSample = 1, // used for seamless switching
+ kSeekModeNextSegment = 2, // used for seamless switching
+ };
+
+ LiveSession(
+ const sp<AMessage> &notify,
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService);
+
status_t dequeueAccessUnit(StreamType stream, sp<ABuffer> *accessUnit);
status_t getStreamFormat(StreamType stream, sp<AMessage> *format);
+ sp<HTTPBase> getHTTPDataSource();
+
void connectAsync(
const char *url,
const KeyedVector<String8, String8> *headers = NULL);
@@ -78,18 +90,19 @@ struct LiveSession : public AHandler {
bool isSeekable() const;
bool hasDynamicDuration() const;
+ static const char *getKeyForStream(StreamType type);
+ static const char *getNameForStream(StreamType type);
+
enum {
kWhatStreamsChanged,
kWhatError,
kWhatPrepared,
kWhatPreparationFailed,
+ kWhatBufferingStart,
+ kWhatBufferingEnd,
+ kWhatBufferingUpdate,
};
- // create a format-change discontinuity
- //
- // swap:
- // whether is format-change discontinuity should trigger a buffer swap
- sp<ABuffer> createFormatChangeBuffer(bool swap = true);
protected:
virtual ~LiveSession();
@@ -103,18 +116,25 @@ private:
kWhatDisconnect = 'disc',
kWhatSeek = 'seek',
kWhatFetcherNotify = 'notf',
- kWhatCheckBandwidth = 'bndw',
kWhatChangeConfiguration = 'chC0',
kWhatChangeConfiguration2 = 'chC2',
kWhatChangeConfiguration3 = 'chC3',
kWhatFinishDisconnect2 = 'fin2',
- kWhatSwapped = 'swap',
- kWhatCheckSwitchDown = 'ckSD',
- kWhatSwitchDown = 'sDwn',
+ kWhatPollBuffering = 'poll',
};
- static const size_t kBandwidthHistoryBytes;
+ // Bandwidth Switch Mark Defaults
+ static const int64_t kUpSwitchMarkUs;
+ static const int64_t kDownSwitchMarkUs;
+ static const int64_t kUpSwitchMarginUs;
+ static const int64_t kResumeThresholdUs;
+
+ // Buffer Prepare/Ready/Underflow Marks
+ static const int64_t kReadyMarkUs;
+ static const int64_t kPrepareMarkUs;
+ static const int64_t kUnderflowMarkUs;
+ struct BandwidthEstimator;
struct BandwidthItem {
size_t mPlaylistIndex;
unsigned long mBandwidth;
@@ -123,23 +143,22 @@ private:
struct FetcherInfo {
sp<PlaylistFetcher> mFetcher;
int64_t mDurationUs;
- bool mIsPrepared;
bool mToBeRemoved;
+ bool mToBeResumed;
};
struct StreamItem {
const char *mType;
AString mUri, mNewUri;
+ SeekMode mSeekMode;
size_t mCurDiscontinuitySeq;
int64_t mLastDequeuedTimeUs;
int64_t mLastSampleDurationUs;
StreamItem()
- : mType(""),
- mCurDiscontinuitySeq(0),
- mLastDequeuedTimeUs(0),
- mLastSampleDurationUs(0) {}
+ : StreamItem("") {}
StreamItem(const char *type)
: mType(type),
+ mSeekMode(kSeekModeExactPosition),
mCurDiscontinuitySeq(0),
mLastDequeuedTimeUs(0),
mLastSampleDurationUs(0) {}
@@ -155,8 +174,10 @@ private:
uint32_t mFlags;
sp<IMediaHTTPService> mHTTPService;
+ bool mBuffering;
bool mInPreparationPhase;
- bool mBuffering[kMaxStreams];
+ int32_t mPollBufferingGeneration;
+ int32_t mPrevBufferPercentage;
sp<HTTPBase> mHTTPDataSource;
KeyedVector<String8, String8> mExtraHeaders;
@@ -165,9 +186,15 @@ private:
Vector<BandwidthItem> mBandwidthItems;
ssize_t mCurBandwidthIndex;
+ ssize_t mOrigBandwidthIndex;
+ int32_t mLastBandwidthBps;
+ sp<BandwidthEstimator> mBandwidthEstimator;
sp<M3UParser> mPlaylist;
+ int32_t mMaxWidth;
+ int32_t mMaxHeight;
+ sp<ALooper> mFetcherLooper;
KeyedVector<AString, FetcherInfo> mFetcherInfos;
uint32_t mStreamMask;
@@ -180,17 +207,10 @@ private:
// we use this to track reconfiguration progress.
uint32_t mSwapMask;
- KeyedVector<StreamType, sp<AnotherPacketSource> > mDiscontinuities;
KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources;
// A second set of packet sources that buffer content for the variant we're switching to.
KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources2;
- // A mutex used to serialize two sets of events:
- // * the swapping of packet sources in dequeueAccessUnit on the player thread, AND
- // * a forced bandwidth switch termination in cancelSwitch on the live looper.
- Mutex mSwapMutex;
-
- int32_t mCheckBandwidthGeneration;
int32_t mSwitchGeneration;
int32_t mSubtitleGeneration;
@@ -203,20 +223,23 @@ private:
bool mReconfigurationInProgress;
bool mSwitchInProgress;
- uint32_t mDisconnectReplyID;
- uint32_t mSeekReplyID;
+ int64_t mUpSwitchMark;
+ int64_t mDownSwitchMark;
+ int64_t mUpSwitchMargin;
+
+ sp<AReplyToken> mDisconnectReplyID;
+ sp<AReplyToken> mSeekReplyID;
bool mFirstTimeUsValid;
int64_t mFirstTimeUs;
int64_t mLastSeekTimeUs;
- sp<AMessage> mSwitchDownMonitor;
KeyedVector<size_t, int64_t> mDiscontinuityAbsStartTimesUs;
KeyedVector<size_t, int64_t> mDiscontinuityOffsetTimesUs;
sp<PlaylistFetcher> addFetcher(const char *uri);
void onConnect(const sp<AMessage> &msg);
- status_t onSeek(const sp<AMessage> &msg);
+ void onSeek(const sp<AMessage> &msg);
void onFinishDisconnect2();
// If given a non-zero block_size (default 0), it is used to cap the number of
@@ -238,45 +261,54 @@ private:
uint32_t block_size = 0,
/* reuse DataSource if doing partial fetch */
sp<DataSource> *source = NULL,
- String8 *actualUrl = NULL);
+ String8 *actualUrl = NULL,
+ /* force connect http even when resuing DataSource */
+ bool forceConnectHTTP = false);
sp<M3UParser> fetchPlaylist(
const char *url, uint8_t *curPlaylistHash, bool *unchanged);
- size_t getBandwidthIndex();
- int64_t latestMediaSegmentStartTimeUs();
+ bool resumeFetcher(
+ const AString &uri, uint32_t streamMask,
+ int64_t timeUs = -1ll, bool newUri = false);
+
+ float getAbortThreshold(
+ ssize_t currentBWIndex, ssize_t targetBWIndex) const;
+ void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
+ size_t getBandwidthIndex(int32_t bandwidthBps);
+ HLSTime latestMediaSegmentStartTime() const;
static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);
static StreamType indexToType(int idx);
static ssize_t typeToIndex(int32_t type);
void changeConfiguration(
- int64_t timeUs, size_t bandwidthIndex, bool pickTrack = false);
+ int64_t timeUs, ssize_t bwIndex = -1, bool pickTrack = false);
void onChangeConfiguration(const sp<AMessage> &msg);
void onChangeConfiguration2(const sp<AMessage> &msg);
void onChangeConfiguration3(const sp<AMessage> &msg);
- void onSwapped(const sp<AMessage> &msg);
- void onCheckSwitchDown();
- void onSwitchDown();
- void tryToFinishBandwidthSwitch();
-
- void scheduleCheckBandwidthEvent();
- void cancelCheckBandwidthEvent();
-
- // cancelBandwidthSwitch is atomic wrt swapPacketSource; call it to prevent packet sources
- // from being swapped out on stale discontinuities while manipulating
- // mPacketSources/mPacketSources2.
- void cancelBandwidthSwitch();
- bool canSwitchBandwidthTo(size_t bandwidthIndex);
- void onCheckBandwidth(const sp<AMessage> &msg);
+ void swapPacketSource(StreamType stream);
+ void tryToFinishBandwidthSwitch(const AString &oldUri);
+ void cancelBandwidthSwitch(bool resume = false);
+ bool checkSwitchProgress(
+ sp<AMessage> &msg, int64_t delayUs, bool *needResumeUntil);
+
+ bool switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow);
+
+ void schedulePollBuffering();
+ void cancelPollBuffering();
+ void restartPollBuffering();
+ void onPollBuffering();
+ bool checkBuffering(bool &underflow, bool &ready, bool &down, bool &up);
+ void startBufferingIfNecessary();
+ void stopBufferingIfNecessary();
+ void notifyBufferingUpdate(int32_t percentage);
void finishDisconnect();
void postPrepared(status_t err);
-
- void swapPacketSource(StreamType stream);
- bool canSwitchUp();
+ void postError(status_t err);
DISALLOW_EVIL_CONSTRUCTORS(LiveSession);
};
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 997b694..ef9145c 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -251,6 +251,7 @@ M3UParser::M3UParser(
mIsComplete(false),
mIsEvent(false),
mDiscontinuitySeq(0),
+ mDiscontinuityCount(0),
mSelectedIndex(-1) {
mInitCheck = parse(data, size);
}
@@ -394,7 +395,9 @@ ssize_t M3UParser::getSelectedTrack(media_track_type type) const {
bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
if (!mIsVariantPlaylist) {
- *uri = mBaseURI;
+ if (uri != NULL) {
+ *uri = mBaseURI;
+ }
// Assume media without any more specific attribute contains
// audio and video, but no subtitles.
@@ -407,7 +410,9 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
AString groupID;
if (!meta->findString(key, &groupID)) {
- *uri = mItems.itemAt(index).mURI;
+ if (uri != NULL) {
+ *uri = mItems.itemAt(index).mURI;
+ }
AString codecs;
if (!meta->findString("codecs", &codecs)) {
@@ -433,18 +438,26 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
}
}
- sp<MediaGroup> group = mMediaGroups.valueFor(groupID);
- if (!group->getActiveURI(uri)) {
- return false;
- }
+ // if uri == NULL, we're only checking if the type is present,
+ // don't care about the active URI (or if there is an active one)
+ if (uri != NULL) {
+ sp<MediaGroup> group = mMediaGroups.valueFor(groupID);
+ if (!group->getActiveURI(uri)) {
+ return false;
+ }
- if ((*uri).empty()) {
- *uri = mItems.itemAt(index).mURI;
+ if ((*uri).empty()) {
+ *uri = mItems.itemAt(index).mURI;
+ }
}
return true;
}
+bool M3UParser::hasType(size_t index, const char *key) const {
+ return getTypeURI(index, key, NULL /* uri */);
+}
+
static bool MakeURL(const char *baseURL, const char *url, AString *out) {
out->clear();
@@ -582,6 +595,7 @@ status_t M3UParser::parse(const void *_data, size_t size) {
itemMeta = new AMessage;
}
itemMeta->setInt32("discontinuity", true);
+ ++mDiscontinuityCount;
} else if (line.startsWith("#EXT-X-STREAM-INF")) {
if (mMeta != NULL) {
return ERROR_MALFORMED;
@@ -609,6 +623,9 @@ status_t M3UParser::parse(const void *_data, size_t size) {
} else if (line.startsWith("#EXT-X-MEDIA")) {
err = parseMedia(line);
} else if (line.startsWith("#EXT-X-DISCONTINUITY-SEQUENCE")) {
+ if (mIsVariantPlaylist) {
+ return ERROR_MALFORMED;
+ }
size_t seq;
err = parseDiscontinuitySequence(line, &seq);
if (err == OK) {
@@ -628,6 +645,8 @@ status_t M3UParser::parse(const void *_data, size_t size) {
|| !itemMeta->findInt64("durationUs", &durationUs)) {
return ERROR_MALFORMED;
}
+ itemMeta->setInt32("discontinuity-sequence",
+ mDiscontinuitySeq + mDiscontinuityCount);
}
mItems.push();
@@ -644,6 +663,14 @@ status_t M3UParser::parse(const void *_data, size_t size) {
++lineNo;
}
+ // error check all fields that are required to appear exactly once
+ // (currently only checking "target-duration")
+ int32_t targetDurationSecs;
+ if (!mIsVariantPlaylist && (mMeta == NULL || !mMeta->findInt32(
+ "target-duration", &targetDurationSecs))) {
+ return ERROR_MALFORMED;
+ }
+
return OK;
}
@@ -781,6 +808,29 @@ status_t M3UParser::parseStreamInf(
*meta = new AMessage;
}
(*meta)->setString(key.c_str(), codecs.c_str());
+ } else if (!strcasecmp("resolution", key.c_str())) {
+ const char *s = val.c_str();
+ char *end;
+ unsigned long width = strtoul(s, &end, 10);
+
+ if (end == s || *end != 'x') {
+ // malformed
+ continue;
+ }
+
+ s = end + 1;
+ unsigned long height = strtoul(s, &end, 10);
+
+ if (end == s || *end != '\0') {
+ // malformed
+ continue;
+ }
+
+ if (meta->get() == NULL) {
+ *meta = new AMessage;
+ }
+ (*meta)->setInt32("width", width);
+ (*meta)->setInt32("height", height);
} else if (!strcasecmp("audio", key.c_str())
|| !strcasecmp("video", key.c_str())
|| !strcasecmp("subtitles", key.c_str())) {
diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h
index 1cad060..fef361f 100644
--- a/media/libstagefright/httplive/M3UParser.h
+++ b/media/libstagefright/httplive/M3UParser.h
@@ -50,6 +50,7 @@ struct M3UParser : public RefBase {
ssize_t getSelectedTrack(media_track_type /* type */) const;
bool getTypeURI(size_t index, const char *key, AString *uri) const;
+ bool hasType(size_t index, const char *key) const;
protected:
virtual ~M3UParser();
@@ -70,6 +71,7 @@ private:
bool mIsComplete;
bool mIsEvent;
size_t mDiscontinuitySeq;
+ int32_t mDiscontinuityCount;
sp<AMessage> mMeta;
Vector<Item> mItems;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 1227600..ce79cc2 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaDefs.h>
@@ -44,24 +45,114 @@
#include <openssl/aes.h>
#include <openssl/md5.h>
+#define FLOGV(fmt, ...) ALOGV("[fetcher-%d] " fmt, mFetcherID, ##__VA_ARGS__)
+#define FSLOGV(stream, fmt, ...) ALOGV("[fetcher-%d] [%s] " fmt, mFetcherID, \
+ LiveSession::getNameForStream(stream), ##__VA_ARGS__)
+
namespace android {
// static
-const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll;
+const int64_t PlaylistFetcher::kMinBufferedDurationUs = 30000000ll;
const int64_t PlaylistFetcher::kMaxMonitorDelayUs = 3000000ll;
// LCM of 188 (size of a TS packet) & 1k works well
const int32_t PlaylistFetcher::kDownloadBlockSize = 47 * 1024;
-const int32_t PlaylistFetcher::kNumSkipFrames = 5;
+
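+// DownloadState saves the context of a partially downloaded segment so that a
+// paused fetcher can later resume that segment where it left off instead of
+// refetching it from the beginning.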
+struct PlaylistFetcher::DownloadState : public RefBase {
+ DownloadState();
+ void resetState();
+ bool hasSavedState() const;
+ void restoreState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
+ void saveState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
+
+private:
+ bool mHasSavedState;
+ AString mUri;
+ sp<AMessage> mItemMeta;
+ sp<ABuffer> mBuffer;
+ sp<ABuffer> mTsBuffer;
+ int32_t mFirstSeqNumberInPlaylist;
+ int32_t mLastSeqNumberInPlaylist;
+};
+
+PlaylistFetcher::DownloadState::DownloadState() {
+ resetState();
+}
+
+bool PlaylistFetcher::DownloadState::hasSavedState() const {
+ return mHasSavedState;
+}
+
+void PlaylistFetcher::DownloadState::resetState() {
+ mHasSavedState = false;
+
+ mUri.clear();
+ mItemMeta = NULL;
+ mBuffer = NULL;
+ mTsBuffer = NULL;
+ mFirstSeqNumberInPlaylist = 0;
+ mLastSeqNumberInPlaylist = 0;
+}
+
+void PlaylistFetcher::DownloadState::restoreState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
+ if (!mHasSavedState) {
+ return;
+ }
+
+ uri = mUri;
+ itemMeta = mItemMeta;
+ buffer = mBuffer;
+ tsBuffer = mTsBuffer;
+ firstSeqNumberInPlaylist = mFirstSeqNumberInPlaylist;
+ lastSeqNumberInPlaylist = mLastSeqNumberInPlaylist;
+
+ resetState();
+}
+
+void PlaylistFetcher::DownloadState::saveState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
+ mHasSavedState = true;
+
+ mUri = uri;
+ mItemMeta = itemMeta;
+ mBuffer = buffer;
+ mTsBuffer = tsBuffer;
+ mFirstSeqNumberInPlaylist = firstSeqNumberInPlaylist;
+ mLastSeqNumberInPlaylist = lastSeqNumberInPlaylist;
+}
PlaylistFetcher::PlaylistFetcher(
const sp<AMessage> &notify,
const sp<LiveSession> &session,
const char *uri,
+ int32_t id,
int32_t subtitleGeneration)
: mNotify(notify),
- mStartTimeUsNotify(notify->dup()),
mSession(session),
mURI(uri),
+ mFetcherID(id),
mStreamTypeMask(0),
mStartTimeUs(-1ll),
mSegmentStartTimeUs(-1ll),
@@ -71,23 +162,30 @@ PlaylistFetcher::PlaylistFetcher(
mSeqNumber(-1),
mNumRetries(0),
mStartup(true),
- mAdaptive(false),
- mPrepared(false),
+ mIDRFound(false),
+ mSeekMode(LiveSession::kSeekModeExactPosition),
+ mTimeChangeSignaled(false),
mNextPTSTimeUs(-1ll),
mMonitorQueueGeneration(0),
mSubtitleGeneration(subtitleGeneration),
+ mLastDiscontinuitySeq(-1ll),
mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY),
mFirstPTSValid(false),
- mAbsoluteTimeAnchorUs(0ll),
- mVideoBuffer(new AnotherPacketSource(NULL)) {
+ mFirstTimeUs(-1ll),
+ mVideoBuffer(new AnotherPacketSource(NULL)),
+ mThresholdRatio(-1.0f),
+ mDownloadState(new DownloadState()) {
memset(mPlaylistHash, 0, sizeof(mPlaylistHash));
- mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
- mStartTimeUsNotify->setInt32("streamMask", 0);
+ mHTTPDataSource = mSession->getHTTPDataSource();
}
PlaylistFetcher::~PlaylistFetcher() {
}
+int32_t PlaylistFetcher::getFetcherID() const {
+ return mFetcherID;
+}
+
int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const {
CHECK(mPlaylist != NULL);
@@ -119,6 +217,32 @@ int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const {
return segmentStartUs;
}
+int64_t PlaylistFetcher::getSegmentDurationUs(int32_t seqNumber) const {
+ CHECK(mPlaylist != NULL);
+
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
+
+ int32_t lastSeqNumberInPlaylist =
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+
+ CHECK_GE(seqNumber, firstSeqNumberInPlaylist);
+ CHECK_LE(seqNumber, lastSeqNumberInPlaylist);
+
+ int32_t index = seqNumber - firstSeqNumberInPlaylist;
+ sp<AMessage> itemMeta;
+ CHECK(mPlaylist->itemAt(
+ index, NULL /* uri */, &itemMeta));
+
+ int64_t itemDurationUs;
+ CHECK(itemMeta->findInt64("durationUs", &itemDurationUs));
+
+ return itemDurationUs;
+}
+
int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const {
int64_t nowUs = ALooper::GetNowUs();
@@ -322,10 +446,10 @@ void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) {
maxDelayUs = minDelayUs;
}
if (delayUs > maxDelayUs) {
- ALOGV("Need to refresh playlist in %" PRId64 , maxDelayUs);
+ FLOGV("Need to refresh playlist in %lld", (long long)maxDelayUs);
delayUs = maxDelayUs;
}
- sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
+ sp<AMessage> msg = new AMessage(kWhatMonitorQueue, this);
msg->setInt32("generation", mMonitorQueueGeneration);
msg->post(delayUs);
}
@@ -334,6 +458,14 @@ void PlaylistFetcher::cancelMonitorQueue() {
++mMonitorQueueGeneration;
}
+void PlaylistFetcher::setStoppingThreshold(float thresholdRatio) {
+ AutoMutex _l(mThresholdLock);
+ if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
+ return;
+ }
+ mThresholdRatio = thresholdRatio;
+}
+
void PlaylistFetcher::startAsync(
const sp<AnotherPacketSource> &audioSource,
const sp<AnotherPacketSource> &videoSource,
@@ -341,8 +473,8 @@ void PlaylistFetcher::startAsync(
int64_t startTimeUs,
int64_t segmentStartTimeUs,
int32_t startDiscontinuitySeq,
- bool adaptive) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
+ LiveSession::SeekMode seekMode) {
+ sp<AMessage> msg = new AMessage(kWhatStart, this);
uint32_t streamTypeMask = 0ul;
@@ -365,22 +497,29 @@ void PlaylistFetcher::startAsync(
msg->setInt64("startTimeUs", startTimeUs);
msg->setInt64("segmentStartTimeUs", segmentStartTimeUs);
msg->setInt32("startDiscontinuitySeq", startDiscontinuitySeq);
- msg->setInt32("adaptive", adaptive);
+ msg->setInt32("seekMode", seekMode);
msg->post();
}
-void PlaylistFetcher::pauseAsync() {
- (new AMessage(kWhatPause, id()))->post();
+void PlaylistFetcher::pauseAsync(float thresholdRatio) {
+ if (thresholdRatio >= 0.0f) {
+ setStoppingThreshold(thresholdRatio);
+ }
+ (new AMessage(kWhatPause, this))->post();
}
void PlaylistFetcher::stopAsync(bool clear) {
- sp<AMessage> msg = new AMessage(kWhatStop, id());
+ setStoppingThreshold(0.0f);
+
+ sp<AMessage> msg = new AMessage(kWhatStop, this);
msg->setInt32("clear", clear);
msg->post();
}
void PlaylistFetcher::resumeUntilAsync(const sp<AMessage> &params) {
- AMessage* msg = new AMessage(kWhatResumeUntil, id());
+ FLOGV("resumeUntilAsync: params=%s", params->debugString().c_str());
+
+ AMessage* msg = new AMessage(kWhatResumeUntil, this);
msg->setMessage("params", params);
msg->post();
}
@@ -404,6 +543,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatPaused);
+ notify->setInt32("seekMode",
+ mDownloadState->hasSavedState()
+ ? LiveSession::kSeekModeNextSample
+ : LiveSession::kSeekModeNextSegment);
notify->post();
break;
}
@@ -450,6 +593,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
mPacketSources.clear();
+ mStopParams.clear();
+ mStartTimeUsNotify = mNotify->dup();
+ mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
+ mStartTimeUsNotify->setString("uri", mURI);
uint32_t streamTypeMask;
CHECK(msg->findInt32("streamTypeMask", (int32_t *)&streamTypeMask));
@@ -457,11 +604,11 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
int64_t startTimeUs;
int64_t segmentStartTimeUs;
int32_t startDiscontinuitySeq;
- int32_t adaptive;
+ int32_t seekMode;
CHECK(msg->findInt64("startTimeUs", &startTimeUs));
CHECK(msg->findInt64("segmentStartTimeUs", &segmentStartTimeUs));
CHECK(msg->findInt32("startDiscontinuitySeq", &startDiscontinuitySeq));
- CHECK(msg->findInt32("adaptive", &adaptive));
+ CHECK(msg->findInt32("seekMode", &seekMode));
if (streamTypeMask & LiveSession::STREAMTYPE_AUDIO) {
void *ptr;
@@ -493,14 +640,26 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
mStreamTypeMask = streamTypeMask;
mSegmentStartTimeUs = segmentStartTimeUs;
- mDiscontinuitySeq = startDiscontinuitySeq;
+
+ if (startDiscontinuitySeq >= 0) {
+ mDiscontinuitySeq = startDiscontinuitySeq;
+ }
+
+ mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY;
+ mSeekMode = (LiveSession::SeekMode) seekMode;
+
+ if (startTimeUs >= 0 || mSeekMode == LiveSession::kSeekModeNextSample) {
+ mStartup = true;
+ mIDRFound = false;
+ mVideoBuffer->clear();
+ }
if (startTimeUs >= 0) {
mStartTimeUs = startTimeUs;
+ mFirstPTSValid = false;
mSeqNumber = -1;
- mStartup = true;
- mPrepared = false;
- mAdaptive = adaptive;
+ mTimeChangeSignaled = false;
+ mDownloadState->resetState();
}
postMonitorQueue();
@@ -510,6 +669,9 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
void PlaylistFetcher::onPause() {
cancelMonitorQueue();
+ mLastDiscontinuitySeq = mDiscontinuitySeq;
+
+ setStoppingThreshold(-1.0f);
}
void PlaylistFetcher::onStop(const sp<AMessage> &msg) {
@@ -524,8 +686,14 @@ void PlaylistFetcher::onStop(const sp<AMessage> &msg) {
}
}
+ // close off the connection after use
+ mHTTPDataSource->disconnect();
+
+ mDownloadState->resetState();
mPacketSources.clear();
mStreamTypeMask = 0;
+
+ setStoppingThreshold(-1.0f);
}
// Resume until we have reached the boundary timestamps listed in `msg`; when
@@ -535,57 +703,18 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {
sp<AMessage> params;
CHECK(msg->findMessage("params", &params));
- bool stop = false;
- for (size_t i = 0; i < mPacketSources.size(); i++) {
- sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
-
- const char *stopKey;
- int streamType = mPacketSources.keyAt(i);
- switch (streamType) {
- case LiveSession::STREAMTYPE_VIDEO:
- stopKey = "timeUsVideo";
- break;
-
- case LiveSession::STREAMTYPE_AUDIO:
- stopKey = "timeUsAudio";
- break;
-
- case LiveSession::STREAMTYPE_SUBTITLES:
- stopKey = "timeUsSubtitle";
- break;
-
- default:
- TRESPASS();
- }
-
- // Don't resume if we would stop within a resume threshold.
- int32_t discontinuitySeq;
- int64_t latestTimeUs = 0, stopTimeUs = 0;
- sp<AMessage> latestMeta = packetSource->getLatestEnqueuedMeta();
- if (latestMeta != NULL
- && latestMeta->findInt32("discontinuitySeq", &discontinuitySeq)
- && discontinuitySeq == mDiscontinuitySeq
- && latestMeta->findInt64("timeUs", &latestTimeUs)
- && params->findInt64(stopKey, &stopTimeUs)
- && stopTimeUs - latestTimeUs < resumeThreshold(latestMeta)) {
- stop = true;
- }
- }
-
- if (stop) {
- for (size_t i = 0; i < mPacketSources.size(); i++) {
- mPacketSources.valueAt(i)->queueAccessUnit(mSession->createFormatChangeBuffer());
- }
- stopAsync(/* clear = */ false);
- return OK;
- }
-
mStopParams = params;
- postMonitorQueue();
+ onDownloadNext();
return OK;
}
+void PlaylistFetcher::notifyStopReached() {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatStopReached);
+ notify->post();
+}
+
void PlaylistFetcher::notifyError(status_t err) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatError);
@@ -605,7 +734,12 @@ void PlaylistFetcher::queueDiscontinuity(
void PlaylistFetcher::onMonitorQueue() {
bool downloadMore = false;
- refreshPlaylist();
+
+ // in the middle of an unfinished download, delay
+ // playlist refresh as it'll change seq numbers
+ if (!mDownloadState->hasSavedState()) {
+ refreshPlaylist();
+ }
int32_t targetDurationSecs;
int64_t targetDurationUs = kMinBufferedDurationUs;
@@ -619,74 +753,66 @@ void PlaylistFetcher::onMonitorQueue() {
targetDurationUs = targetDurationSecs * 1000000ll;
}
- // buffer at least 3 times the target duration, or up to 10 seconds
- int64_t durationToBufferUs = targetDurationUs * 3;
- if (durationToBufferUs > kMinBufferedDurationUs) {
- durationToBufferUs = kMinBufferedDurationUs;
- }
-
int64_t bufferedDurationUs = 0ll;
- status_t finalResult = NOT_ENOUGH_DATA;
+ status_t finalResult = OK;
if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
sp<AnotherPacketSource> packetSource =
mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES);
bufferedDurationUs =
packetSource->getBufferedDurationUs(&finalResult);
- finalResult = OK;
} else {
- // Use max stream duration to prevent us from waiting on a non-existent stream;
- // when we cannot make out from the manifest what streams are included in a playlist
- // we might assume extra streams.
+ // Use min stream duration, but ignore streams that have never had any
+ // packet enqueued, to prevent us from waiting on a non-existent stream;
+ // when we cannot make out from the manifest what streams are included in
+ // a playlist we might assume extra streams.
+ bufferedDurationUs = -1ll;
for (size_t i = 0; i < mPacketSources.size(); ++i) {
- if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) {
+ if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0
+ || mPacketSources[i]->getLatestEnqueuedMeta() == NULL) {
continue;
}
int64_t bufferedStreamDurationUs =
mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult);
- ALOGV("buffered %" PRId64 " for stream %d",
- bufferedStreamDurationUs, mPacketSources.keyAt(i));
- if (bufferedStreamDurationUs > bufferedDurationUs) {
+
+ FSLOGV(mPacketSources.keyAt(i), "buffered %lld", (long long)bufferedStreamDurationUs);
+
+ if (bufferedDurationUs == -1ll
+ || bufferedStreamDurationUs < bufferedDurationUs) {
bufferedDurationUs = bufferedStreamDurationUs;
}
}
+ if (bufferedDurationUs == -1ll) {
+ bufferedDurationUs = 0ll;
+ }
}
- downloadMore = (bufferedDurationUs < durationToBufferUs);
- // signal start if buffered up at least the target size
- if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) {
- mPrepared = true;
-
- ALOGV("prepared, buffered=%" PRId64 " > %" PRId64 "",
- bufferedDurationUs, targetDurationUs);
- sp<AMessage> msg = mNotify->dup();
- msg->setInt32("what", kWhatTemporarilyDoneFetching);
- msg->post();
- }
+ if (finalResult == OK && bufferedDurationUs < kMinBufferedDurationUs) {
+ FLOGV("monitoring, buffered=%lld < %lld",
+ (long long)bufferedDurationUs, (long long)kMinBufferedDurationUs);
- if (finalResult == OK && downloadMore) {
- ALOGV("monitoring, buffered=%" PRId64 " < %" PRId64 "",
- bufferedDurationUs, durationToBufferUs);
// delay the next download slightly; hopefully this gives other concurrent fetchers
// a better chance to run.
// onDownloadNext();
- sp<AMessage> msg = new AMessage(kWhatDownloadNext, id());
+ sp<AMessage> msg = new AMessage(kWhatDownloadNext, this);
msg->setInt32("generation", mMonitorQueueGeneration);
msg->post(1000l);
} else {
- // Nothing to do yet, try again in a second.
+ // We'd like to maintain buffering above kMinBufferedDurationUs, so try
+ // again when the buffer is just about to drop below that
+ // (or after targetDurationUs / 2, whichever is smaller).
+ int64_t delayUs = bufferedDurationUs - kMinBufferedDurationUs + 1000000ll;
+ if (delayUs > targetDurationUs / 2) {
+ delayUs = targetDurationUs / 2;
+ }
- sp<AMessage> msg = mNotify->dup();
- msg->setInt32("what", kWhatTemporarilyDoneFetching);
- msg->post();
+ FLOGV("pausing for %lld, buffered=%lld > %lld",
+ (long long)delayUs,
+ (long long)bufferedDurationUs,
+ (long long)kMinBufferedDurationUs);
- int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2;
- ALOGV("pausing for %" PRId64 ", buffered=%" PRId64 " > %" PRId64 "",
- delayUs, bufferedDurationUs, durationToBufferUs);
- // :TRICKY: need to enforce minimum delay because the delay to
- // refresh the playlist will become 0
- postMonitorQueue(delayUs, mPrepared ? targetDurationUs * 2 : 0);
+ postMonitorQueue(delayUs);
}
}
@@ -715,6 +841,13 @@ status_t PlaylistFetcher::refreshPlaylist() {
if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
updateDuration();
}
+ // Notify LiveSession to use target-duration based buffering level
+ // for up/down switch. Default LiveSession::kUpSwitchMark may not
+ // be reachable for live streams, as our max buffering amount is
+ // limited to 3 segments.
+ if (!mPlaylist->isComplete()) {
+ updateTargetDuration();
+ }
}
mLastPlaylistFetchTimeUs = ALooper::GetNowUs();
@@ -727,10 +860,75 @@ bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) {
return buffer->size() > 0 && buffer->data()[0] == 0x47;
}
-void PlaylistFetcher::onDownloadNext() {
+bool PlaylistFetcher::shouldPauseDownload() {
+ if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
+ // doesn't apply to subtitles
+ return false;
+ }
+
+ // Calculate threshold to abort current download
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+ int64_t thresholdUs = -1;
+ {
+ AutoMutex _l(mThresholdLock);
+ thresholdUs = (mThresholdRatio < 0.0f) ?
+ -1ll : mThresholdRatio * targetDurationUs;
+ }
+
+ if (thresholdUs < 0) {
+ // never abort
+ return false;
+ } else if (thresholdUs == 0) {
+ // immediately abort
+ return true;
+ }
+
+ // now we have a positive thresholdUs, abort if remaining
+ // portion to download is over that threshold.
+ if (mSegmentFirstPTS < 0) {
+ // this means we haven't even find the first access unit,
+ // abort now as we must be very far away from the end.
+ return true;
+ }
+ int64_t lastEnqueueUs = mSegmentFirstPTS;
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) {
+ continue;
+ }
+ sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta();
+ int32_t type;
+ if (meta == NULL || meta->findInt32("discontinuity", &type)) {
+ continue;
+ }
+ int64_t tmpUs;
+ CHECK(meta->findInt64("timeUs", &tmpUs));
+ if (tmpUs > lastEnqueueUs) {
+ lastEnqueueUs = tmpUs;
+ }
+ }
+ lastEnqueueUs -= mSegmentFirstPTS;
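+ // lastEnqueueUs now holds how much of this segment has already been parsed
+ // and queued; (targetDurationUs - lastEnqueueUs) is roughly what remains.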
+
+ FLOGV("%spausing now, thresholdUs %lld, remaining %lld",
+ targetDurationUs - lastEnqueueUs > thresholdUs ? "" : "not ",
+ (long long)thresholdUs,
+ (long long)(targetDurationUs - lastEnqueueUs));
+
+ if (targetDurationUs - lastEnqueueUs > thresholdUs) {
+ return true;
+ }
+ return false;
+}
+
+bool PlaylistFetcher::initDownloadState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
status_t err = refreshPlaylist();
- int32_t firstSeqNumberInPlaylist = 0;
- int32_t lastSeqNumberInPlaylist = 0;
+ firstSeqNumberInPlaylist = 0;
+ lastSeqNumberInPlaylist = 0;
bool discontinuity = false;
if (mPlaylist != NULL) {
@@ -746,6 +944,8 @@ void PlaylistFetcher::onDownloadNext() {
}
}
+ mSegmentFirstPTS = -1ll;
+
if (mPlaylist != NULL && mSeqNumber < 0) {
CHECK_GE(mStartTimeUs, 0ll);
@@ -764,8 +964,8 @@ void PlaylistFetcher::onDownloadNext() {
mStartTimeUs -= getSegmentStartTimeUs(mSeqNumber);
}
mStartTimeUsRelative = true;
- ALOGV("Initial sequence number for time %" PRId64 " is %d from (%d .. %d)",
- mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist,
+ FLOGV("Initial sequence number for time %lld is %d from (%d .. %d)",
+ (long long)mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist);
} else {
// When adapting or track switching, mSegmentStartTimeUs (relative
@@ -773,7 +973,8 @@ void PlaylistFetcher::onDownloadNext() {
// timestamps coming from the media container) is used to determine the position
// inside a segment.
mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs);
- if (mAdaptive) {
+ if (mStreamTypeMask != LiveSession::STREAMTYPE_SUBTITLES
+ && mSeekMode != LiveSession::kSeekModeNextSample) {
// avoid double fetch/decode
mSeqNumber += 1;
}
@@ -789,7 +990,7 @@ void PlaylistFetcher::onDownloadNext() {
if (mSeqNumber > lastSeqNumberInPlaylist) {
mSeqNumber = lastSeqNumberInPlaylist;
}
- ALOGV("Initial sequence number for live event %d from (%d .. %d)",
+ FLOGV("Initial sequence number is %d from (%d .. %d)",
mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist);
}
@@ -818,17 +1019,17 @@ void PlaylistFetcher::onDownloadNext() {
if (delayUs > kMaxMonitorDelayUs) {
delayUs = kMaxMonitorDelayUs;
}
- ALOGV("sequence number high: %d from (%d .. %d), "
- "monitor in %" PRId64 " (retry=%d)",
+ FLOGV("sequence number high: %d from (%d .. %d), "
+ "monitor in %lld (retry=%d)",
mSeqNumber, firstSeqNumberInPlaylist,
- lastSeqNumberInPlaylist, delayUs, mNumRetries);
+ lastSeqNumberInPlaylist, (long long)delayUs, mNumRetries);
postMonitorQueue(delayUs);
- return;
+ return false;
}
if (err != OK) {
notifyError(err);
- return;
+ return false;
}
// we've missed the boat, let's start 3 segments prior to the latest sequence
@@ -843,12 +1044,8 @@ void PlaylistFetcher::onDownloadNext() {
// but since the segments we are supposed to fetch have already rolled off
// the playlist, i.e. we have already missed the boat, we inevitably have to
// skip.
- for (size_t i = 0; i < mPacketSources.size(); i++) {
- sp<ABuffer> formatChange = mSession->createFormatChangeBuffer();
- mPacketSources.valueAt(i)->queueAccessUnit(formatChange);
- }
- stopAsync(/* clear = */ false);
- return;
+ notifyStopReached();
+ return false;
}
mSeqNumber = lastSeqNumberInPlaylist - 3;
if (mSeqNumber < firstSeqNumberInPlaylist) {
@@ -858,45 +1055,49 @@ void PlaylistFetcher::onDownloadNext() {
// fall through
} else {
- ALOGE("Cannot find sequence number %d in playlist "
- "(contains %d - %d)",
- mSeqNumber, firstSeqNumberInPlaylist,
- firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1);
+ if (mPlaylist != NULL) {
+ ALOGE("Cannot find sequence number %d in playlist "
+ "(contains %d - %d)",
+ mSeqNumber, firstSeqNumberInPlaylist,
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1);
- notifyError(ERROR_END_OF_STREAM);
- return;
+ notifyError(ERROR_END_OF_STREAM);
+ } else {
+ // It's possible that we were never able to download the playlist.
+ // In this case we should notify error, instead of EOS, as EOS during
+ // prepare means we succeeded in downloading everything.
+ ALOGE("Failed to download playlist!");
+ notifyError(ERROR_IO);
+ }
+
+ return false;
}
}
mNumRetries = 0;
- AString uri;
- sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt(
mSeqNumber - firstSeqNumberInPlaylist,
&uri,
&itemMeta));
+ CHECK(itemMeta->findInt32("discontinuity-sequence", &mDiscontinuitySeq));
+
int32_t val;
if (itemMeta->findInt32("discontinuity", &val) && val != 0) {
- mDiscontinuitySeq++;
+ discontinuity = true;
+ } else if (mLastDiscontinuitySeq >= 0
+ && mDiscontinuitySeq != mLastDiscontinuitySeq) {
+ // Seek jumped to a new discontinuity sequence. We need to signal
+ // a format change to decoder. Decoder needs to shutdown and be
+ // created again if seamless format change is unsupported.
+ FLOGV("saw discontinuity: mStartup %d, mLastDiscontinuitySeq %d, "
+ "mDiscontinuitySeq %d, mStartTimeUs %lld",
+ mStartup, mLastDiscontinuitySeq, mDiscontinuitySeq, (long long)mStartTimeUs);
discontinuity = true;
}
+ mLastDiscontinuitySeq = -1;
- int64_t range_offset, range_length;
- if (!itemMeta->findInt64("range-offset", &range_offset)
- || !itemMeta->findInt64("range-length", &range_length)) {
- range_offset = 0;
- range_length = -1;
- }
-
- ALOGV("fetching segment %d from (%d .. %d)",
- mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist);
-
- ALOGV("fetching '%s'", uri.c_str());
-
- sp<DataSource> source;
- sp<ABuffer> buffer, tsBuffer;
// decrypt a junk buffer to prefetch key; since a session uses only one http connection,
// this avoids interleaved connections to the key and segment file.
{
@@ -906,16 +1107,127 @@ void PlaylistFetcher::onDownloadNext() {
true /* first */);
if (err != OK) {
notifyError(err);
+ return false;
+ }
+ }
+
+ if ((mStartup && !mTimeChangeSignaled) || discontinuity) {
+ // We need to signal a time discontinuity to ATSParser on the
+ // first segment after start, or on a discontinuity segment.
+ // Setting mNextPTSTimeUs informs extractAndQueueAccessUnitsXX()
+ // to send the time discontinuity.
+ if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
+ // If this was a live stream this would make no sense, since we
+ // don't have access to all the segments before the current one.
+ mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber);
+ }
+
+ // Set mTimeChangeSignaled to true, so that if start-time searching
+ // goes into the 2nd segment (without a discontinuity), we don't
+ // reset the time again; resetting causes corruption because pending
+ // data in ATSParser gets cleared.
+ mTimeChangeSignaled = true;
+ }
+
+ if (discontinuity) {
+ ALOGI("queueing discontinuity (explicit=%d)", discontinuity);
+
+ // Signal a format discontinuity to ATSParser to clear partial data
+ // from previous streams. Not doing this causes bitstream corruption.
+ if (mTSParser != NULL) {
+ mTSParser->signalDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMATCHANGE, NULL /* extra */);
+ }
+
+ queueDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMATCHANGE,
+ NULL /* extra */);
+
+ if (mStartup && mStartTimeUsRelative && mFirstPTSValid) {
+ // This means we guessed mStartTimeUs to be in the previous
+ // segment (likely very close to the end), but either video or
+ // audio has not found start by the end of that segment.
+ //
+ // If this new segment is not a discontinuity, keep searching.
+ //
+ // If this new segment even got a discontinuity marker, just
+ // set mStartTimeUs=0, and take all samples from now on.
+ mStartTimeUs = 0;
+ mFirstPTSValid = false;
+ }
+ }
+
+ FLOGV("fetching segment %d from (%d .. %d)",
+ mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist);
+ return true;
+}
+
+void PlaylistFetcher::onDownloadNext() {
+ AString uri;
+ sp<AMessage> itemMeta;
+ sp<ABuffer> buffer;
+ sp<ABuffer> tsBuffer;
+ int32_t firstSeqNumberInPlaylist = 0;
+ int32_t lastSeqNumberInPlaylist = 0;
+ bool connectHTTP = true;
+
+ if (mDownloadState->hasSavedState()) {
+ mDownloadState->restoreState(
+ uri,
+ itemMeta,
+ buffer,
+ tsBuffer,
+ firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
+ connectHTTP = false;
+ FLOGV("resuming: '%s'", uri.c_str());
+ } else {
+ if (!initDownloadState(
+ uri,
+ itemMeta,
+ firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist)) {
return;
}
+ FLOGV("fetching: '%s'", uri.c_str());
+ }
+
+ int64_t range_offset, range_length;
+ if (!itemMeta->findInt64("range-offset", &range_offset)
+ || !itemMeta->findInt64("range-length", &range_length)) {
+ range_offset = 0;
+ range_length = -1;
}
// block-wise download
- bool startup = mStartup;
+ bool shouldPause = false;
ssize_t bytesRead;
do {
+ sp<DataSource> source = mHTTPDataSource;
+
+ int64_t startUs = ALooper::GetNowUs();
bytesRead = mSession->fetchFile(
- uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, &source);
+ uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize,
+ &source, NULL, connectHTTP);
+
+ // add sample for bandwidth estimation, excluding samples from subtitles (as
+ // they're too small), or during startup/resumeUntil (when we could have more
+ // than one connection open, which affects bandwidth)
+ if (!mStartup && mStopParams == NULL && bytesRead > 0
+ && (mStreamTypeMask
+ & (LiveSession::STREAMTYPE_AUDIO
+ | LiveSession::STREAMTYPE_VIDEO))) {
+ int64_t delayUs = ALooper::GetNowUs() - startUs;
+ mSession->addBandwidthMeasurement(bytesRead, delayUs);
+
+ if (delayUs > 2000000ll) {
+ FLOGV("bytesRead %zd took %.2f seconds - abnormal bandwidth dip",
+ bytesRead, (double)delayUs / 1.0e6);
+ }
+ }
+
+ connectHTTP = false;
if (bytesRead < 0) {
status_t err = bytesRead;
@@ -941,28 +1253,7 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- if (startup || discontinuity) {
- // Signal discontinuity.
-
- if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
- // If this was a live event this made no sense since
- // we don't have access to all the segment before the current
- // one.
- mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber);
- }
-
- if (discontinuity) {
- ALOGI("queueing discontinuity (explicit=%d)", discontinuity);
-
- queueDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE,
- NULL /* extra */);
-
- discontinuity = false;
- }
-
- startup = false;
- }
+ bool startUp = mStartup; // save current start up state
err = OK;
if (bufferStartsWithTsSyncByte(buffer)) {
@@ -976,7 +1267,6 @@ void PlaylistFetcher::onDownloadNext() {
tsBuffer->setRange(tsOff, tsSize);
}
tsBuffer->setRange(tsBuffer->offset(), tsBuffer->size() + bytesRead);
-
err = extractAndQueueAccessUnitsFromTs(tsBuffer);
}
@@ -991,13 +1281,35 @@ void PlaylistFetcher::onDownloadNext() {
return;
} else if (err == ERROR_OUT_OF_RANGE) {
// reached stopping point
- stopAsync(/* clear = */ false);
+ notifyStopReached();
return;
} else if (err != OK) {
notifyError(err);
return;
}
-
+ // If we're switching, post start notification
+ // this should only be posted when the last chunk is fully processed by TSParser
+ if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) {
+ CHECK(mStartTimeUsNotify != NULL);
+ mStartTimeUsNotify->post();
+ mStartTimeUsNotify.clear();
+ shouldPause = true;
+ }
+ if (shouldPause || shouldPauseDownload()) {
+ // save state and return if this is not the last chunk,
+ // leaving the fetcher in paused state.
+ if (bytesRead != 0) {
+ mDownloadState->saveState(
+ uri,
+ itemMeta,
+ buffer,
+ tsBuffer,
+ firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
+ return;
+ }
+ shouldPause = true;
+ }
} while (bytesRead != 0);
if (bufferStartsWithTsSyncByte(buffer)) {
@@ -1034,7 +1346,6 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- err = OK;
if (tsBuffer != NULL) {
AString method;
CHECK(buffer->meta()->findString("cipher-method", &method));
@@ -1048,30 +1359,40 @@ void PlaylistFetcher::onDownloadNext() {
}
// bulk extract non-ts files
+ bool startUp = mStartup;
if (tsBuffer == NULL) {
- err = extractAndQueueAccessUnits(buffer, itemMeta);
+ status_t err = extractAndQueueAccessUnits(buffer, itemMeta);
if (err == -EAGAIN) {
// starting sequence number too low/high
postMonitorQueue();
return;
} else if (err == ERROR_OUT_OF_RANGE) {
// reached stopping point
- stopAsync(/* clear = */false);
+ notifyStopReached();
+ return;
+ } else if (err != OK) {
+ notifyError(err);
return;
}
}
- if (err != OK) {
- notifyError(err);
- return;
- }
-
++mSeqNumber;
- postMonitorQueue();
+ // if adapting, pause once we've found the next starting point
+ if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) {
+ CHECK(mStartTimeUsNotify != NULL);
+ mStartTimeUsNotify->post();
+ mStartTimeUsNotify.clear();
+ shouldPause = true;
+ }
+
+ if (!shouldPause) {
+ postMonitorQueue();
+ }
}
-int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const {
+int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(
+ int64_t anchorTimeUs, int64_t targetDiffUs) const {
int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist;
if (mPlaylist->meta() == NULL
|| !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) {
@@ -1080,7 +1401,8 @@ int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const
lastSeqNumberInPlaylist = firstSeqNumberInPlaylist + mPlaylist->size() - 1;
int32_t index = mSeqNumber - firstSeqNumberInPlaylist - 1;
- while (index >= 0 && anchorTimeUs > mStartTimeUs) {
+ // adjust anchorTimeUs to within targetDiffUs from mStartTimeUs
+ while (index >= 0 && anchorTimeUs - mStartTimeUs > targetDiffUs) {
sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta));
@@ -1101,28 +1423,22 @@ int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const
int32_t PlaylistFetcher::getSeqNumberForDiscontinuity(size_t discontinuitySeq) const {
int32_t firstSeqNumberInPlaylist;
- if (mPlaylist->meta() == NULL
- || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) {
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
firstSeqNumberInPlaylist = 0;
}
- size_t curDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
- if (discontinuitySeq < curDiscontinuitySeq) {
- return firstSeqNumberInPlaylist <= 0 ? 0 : (firstSeqNumberInPlaylist - 1);
- }
-
size_t index = 0;
while (index < mPlaylist->size()) {
sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt( index, NULL /* uri */, &itemMeta));
-
- int64_t discontinuity;
- if (itemMeta->findInt64("discontinuity", &discontinuity)) {
- curDiscontinuitySeq++;
- }
-
+ size_t curDiscontinuitySeq;
+ CHECK(itemMeta->findInt32("discontinuity-sequence", (int32_t *)&curDiscontinuitySeq));
+ int32_t seqNumber = firstSeqNumberInPlaylist + index;
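+ // Segments now carry their own "discontinuity-sequence"; return the first
+ // segment that matches, or the one just before the first higher sequence
+ // if the requested sequence precedes the playlist window.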
if (curDiscontinuitySeq == discontinuitySeq) {
- return firstSeqNumberInPlaylist + index;
+ return seqNumber;
+ } else if (curDiscontinuitySeq > discontinuitySeq) {
+ return seqNumber <= 0 ? 0 : seqNumber - 1;
}
++index;
@@ -1182,6 +1498,7 @@ const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties(
accessUnit->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq);
accessUnit->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber));
+ accessUnit->meta()->setInt64("segmentDurationUs", getSegmentDurationUs(mSeqNumber));
return accessUnit;
}
@@ -1197,12 +1514,16 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
// ATSParser from skewing the timestamps of access units.
extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0);
+ // When adapting, signal a recent media time to the parser,
+ // so that PTS wrap around is handled for the new variant.
+ if (mStartTimeUs >= 0 && !mStartTimeUsRelative) {
+ extra->setInt64(IStreamListener::kKeyRecentMediaTimeUs, mStartTimeUs);
+ }
+
mTSParser->signalDiscontinuity(
ATSParser::DISCONTINUITY_TIME, extra);
- mAbsoluteTimeAnchorUs = mNextPTSTimeUs;
mNextPTSTimeUs = -1ll;
- mFirstPTSValid = false;
}
size_t offset = 0;
@@ -1222,30 +1543,15 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
for (size_t i = mPacketSources.size(); i-- > 0;) {
sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
- const char *key;
- ATSParser::SourceType type;
const LiveSession::StreamType stream = mPacketSources.keyAt(i);
- switch (stream) {
- case LiveSession::STREAMTYPE_VIDEO:
- type = ATSParser::VIDEO;
- key = "timeUsVideo";
- break;
-
- case LiveSession::STREAMTYPE_AUDIO:
- type = ATSParser::AUDIO;
- key = "timeUsAudio";
- break;
-
- case LiveSession::STREAMTYPE_SUBTITLES:
- {
- ALOGE("MPEG2 Transport streams do not contain subtitles.");
- return ERROR_MALFORMED;
- break;
- }
-
- default:
- TRESPASS();
+ if (stream == LiveSession::STREAMTYPE_SUBTITLES) {
+ ALOGE("MPEG2 Transport streams do not contain subtitles.");
+ return ERROR_MALFORMED;
}
+ const char *key = LiveSession::getKeyForStream(stream);
+ ATSParser::SourceType type =
+ (stream == LiveSession::STREAMTYPE_AUDIO) ?
+ ATSParser::AUDIO : ATSParser::VIDEO;
sp<AnotherPacketSource> source =
static_cast<AnotherPacketSource *>(
@@ -1255,116 +1561,139 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
continue;
}
- int64_t timeUs;
+ const char *mime;
+ sp<MetaData> format = source->getFormat();
+ bool isAvc = format != NULL && format->findCString(kKeyMIMEType, &mime)
+ && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+
sp<ABuffer> accessUnit;
status_t finalResult;
while (source->hasBufferAvailable(&finalResult)
&& source->dequeueAccessUnit(&accessUnit) == OK) {
+ int64_t timeUs;
CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+ if (mSegmentFirstPTS < 0ll) {
+ mSegmentFirstPTS = timeUs;
+ if (!mStartTimeUsRelative) {
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
+
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+ // mStartup
+ // mStartup is true until we have queued a packet for all the streams
+ // we are fetching. We queue packets whose timestamps are greater than
+ // mStartTimeUs.
+ // mSegmentStartTimeUs >= 0
+ // mSegmentStartTimeUs is non-negative when adapting or switching tracks
+ // mSeqNumber > firstSeqNumberInPlaylist
+ // don't decrement mSeqNumber if it already points to the 1st segment
+ // timeUs - mStartTimeUs > targetDurationUs:
+ // This and the 2 above conditions should only happen when adapting in a live
+ // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher
+ // would start fetching after timeUs, which should be greater than mStartTimeUs;
+ // the old fetcher would then continue fetching data until timeUs. We don't want
+ // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to
+ // stop as early as possible. The definition of being "too far ahead" is
+ // arbitrary; here we use targetDurationUs as threshold.
+ int64_t targetDiffUs = (mSeekMode == LiveSession::kSeekModeNextSample
+ ? 0 : targetDurationUs);
+ if (mStartup && mSegmentStartTimeUs >= 0
+ && mSeqNumber > firstSeqNumberInPlaylist
+ && timeUs - mStartTimeUs > targetDiffUs) {
+ // we just guessed a starting timestamp that is too high when adapting in a
+ // live stream; re-adjust based on the actual timestamp extracted from the
+ // media segment; if we didn't move backward after the re-adjustment
+ // (newSeqNumber), start at least 1 segment prior.
+ int32_t newSeqNumber = getSeqNumberWithAnchorTime(
+ timeUs, targetDiffUs);
+
+ FLOGV("guessed wrong seq number: timeUs=%lld, mStartTimeUs=%lld, "
+ "targetDurationUs=%lld, mSeqNumber=%d, newSeq=%d, firstSeq=%d",
+ (long long)timeUs,
+ (long long)mStartTimeUs,
+ (long long)targetDurationUs,
+ mSeqNumber,
+ newSeqNumber,
+ firstSeqNumberInPlaylist);
+
+ if (newSeqNumber >= mSeqNumber) {
+ --mSeqNumber;
+ } else {
+ mSeqNumber = newSeqNumber;
+ }
+ mStartTimeUsNotify = mNotify->dup();
+ mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
+ mStartTimeUsNotify->setString("uri", mURI);
+ mIDRFound = false;
+ return -EAGAIN;
+ }
+ }
+ }
if (mStartup) {
if (!mFirstPTSValid) {
mFirstTimeUs = timeUs;
mFirstPTSValid = true;
}
+ bool startTimeReached = true;
if (mStartTimeUsRelative) {
+ FLOGV("startTimeUsRelative, timeUs (%lld) - %lld = %lld",
+ (long long)timeUs,
+ (long long)mFirstTimeUs,
+ (long long)(timeUs - mFirstTimeUs));
timeUs -= mFirstTimeUs;
if (timeUs < 0) {
+ FLOGV("clamp negative timeUs to 0");
timeUs = 0;
}
+ startTimeReached = (timeUs >= mStartTimeUs);
}
- if (timeUs < mStartTimeUs) {
- // buffer up to the closest preceding IDR frame
- ALOGV("timeUs %" PRId64 " us < mStartTimeUs %" PRId64 " us",
- timeUs, mStartTimeUs);
- const char *mime;
- sp<MetaData> format = source->getFormat();
- bool isAvc = false;
- if (format != NULL && format->findCString(kKeyMIMEType, &mime)
- && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
- isAvc = true;
- }
- if (isAvc && IsIDR(accessUnit)) {
- mVideoBuffer->clear();
- }
+ if (!startTimeReached || (isAvc && !mIDRFound)) {
+                    // buffer up to the closest preceding IDR frame in the next segment,
+ // or the closest succeeding IDR frame after the exact position
+ FSLOGV(stream, "timeUs=%lld, mStartTimeUs=%lld, mIDRFound=%d",
+ (long long)timeUs, (long long)mStartTimeUs, mIDRFound);
if (isAvc) {
- mVideoBuffer->queueAccessUnit(accessUnit);
+ if (IsIDR(accessUnit)) {
+ mVideoBuffer->clear();
+ FSLOGV(stream, "found IDR, clear mVideoBuffer");
+ mIDRFound = true;
+ }
+ if (mIDRFound && mStartTimeUsRelative && !startTimeReached) {
+ mVideoBuffer->queueAccessUnit(accessUnit);
+ FSLOGV(stream, "saving AVC video AccessUnit");
+ }
}
-
- continue;
- }
- }
-
- CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
- if (mStartTimeUsNotify != NULL && timeUs > mStartTimeUs) {
- int32_t firstSeqNumberInPlaylist;
- if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
- "media-sequence", &firstSeqNumberInPlaylist)) {
- firstSeqNumberInPlaylist = 0;
- }
-
- int32_t targetDurationSecs;
- CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
- int64_t targetDurationUs = targetDurationSecs * 1000000ll;
- // mStartup
- // mStartup is true until we have queued a packet for all the streams
- // we are fetching. We queue packets whose timestamps are greater than
- // mStartTimeUs.
- // mSegmentStartTimeUs >= 0
- // mSegmentStartTimeUs is non-negative when adapting or switching tracks
- // mSeqNumber > firstSeqNumberInPlaylist
- // don't decrement mSeqNumber if it already points to the 1st segment
- // timeUs - mStartTimeUs > targetDurationUs:
- // This and the 2 above conditions should only happen when adapting in a live
- // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher
- // would start fetching after timeUs, which should be greater than mStartTimeUs;
- // the old fetcher would then continue fetching data until timeUs. We don't want
- // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to
- // stop as early as possible. The definition of being "too far ahead" is
- // arbitrary; here we use targetDurationUs as threshold.
- if (mStartup && mSegmentStartTimeUs >= 0
- && mSeqNumber > firstSeqNumberInPlaylist
- && timeUs - mStartTimeUs > targetDurationUs) {
- // we just guessed a starting timestamp that is too high when adapting in a
- // live stream; re-adjust based on the actual timestamp extracted from the
- // media segment; if we didn't move backward after the re-adjustment
- // (newSeqNumber), start at least 1 segment prior.
- int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs);
- if (newSeqNumber >= mSeqNumber) {
- --mSeqNumber;
- } else {
- mSeqNumber = newSeqNumber;
+ if (!startTimeReached || (isAvc && !mIDRFound)) {
+ continue;
}
- mStartTimeUsNotify = mNotify->dup();
- mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
- return -EAGAIN;
- }
-
- int32_t seq;
- if (!mStartTimeUsNotify->findInt32("discontinuitySeq", &seq)) {
- mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);
}
- int64_t startTimeUs;
- if (!mStartTimeUsNotify->findInt64(key, &startTimeUs)) {
- mStartTimeUsNotify->setInt64(key, timeUs);
+ }
- uint32_t streamMask = 0;
- mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask);
+ if (mStartTimeUsNotify != NULL) {
+ uint32_t streamMask = 0;
+ mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask);
+ if (!(streamMask & mPacketSources.keyAt(i))) {
streamMask |= mPacketSources.keyAt(i);
mStartTimeUsNotify->setInt32("streamMask", streamMask);
+ FSLOGV(stream, "found start point, timeUs=%lld, streamMask becomes %x",
+ (long long)timeUs, streamMask);
if (streamMask == mStreamTypeMask) {
+ FLOGV("found start point for all streams");
mStartup = false;
- mStartTimeUsNotify->post();
- mStartTimeUsNotify.clear();
}
}
}
if (mStopParams != NULL) {
- // Queue discontinuity in original stream.
int32_t discontinuitySeq;
int64_t stopTimeUs;
if (!mStopParams->findInt32("discontinuitySeq", &discontinuitySeq)
@@ -1372,14 +1701,13 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
|| !mStopParams->findInt64(key, &stopTimeUs)
|| (discontinuitySeq == mDiscontinuitySeq
&& timeUs >= stopTimeUs)) {
- packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());
+ FSLOGV(stream, "reached stop point, timeUs=%lld", (long long)timeUs);
mStreamTypeMask &= ~stream;
mPacketSources.removeItemsAt(i);
break;
}
}
- // Note that we do NOT dequeue any discontinuities except for format change.
if (stream == LiveSession::STREAMTYPE_VIDEO) {
const bool discard = true;
status_t status;
@@ -1388,11 +1716,16 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
mVideoBuffer->dequeueAccessUnit(&videoBuffer);
setAccessUnitProperties(videoBuffer, source, discard);
packetSource->queueAccessUnit(videoBuffer);
+ int64_t bufferTimeUs;
+ CHECK(videoBuffer->meta()->findInt64("timeUs", &bufferTimeUs));
+ FSLOGV(stream, "queueAccessUnit (saved), timeUs=%lld",
+ (long long)bufferTimeUs);
}
}
setAccessUnitProperties(accessUnit, source);
packetSource->queueAccessUnit(accessUnit);
+ FSLOGV(stream, "queueAccessUnit, timeUs=%lld", (long long)timeUs);
}
if (err != OK) {
@@ -1410,7 +1743,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
if (!mStreamTypeMask) {
// Signal gap is filled between original and new stream.
- ALOGV("ERROR OUT OF RANGE");
+ FLOGV("reached stop point for all streams");
return ERROR_OUT_OF_RANGE;
}
@@ -1467,8 +1800,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
}
if (mNextPTSTimeUs >= 0ll) {
- mFirstPTSValid = false;
- mAbsoluteTimeAnchorUs = mNextPTSTimeUs;
mNextPTSTimeUs = -1ll;
}
@@ -1569,7 +1900,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
CHECK(packetSource->getFormat()->findInt32(kKeySampleRate, &sampleRate));
int64_t timeUs = (PTS * 100ll) / 9ll;
- if (!mFirstPTSValid) {
+ if (mStartup && !mFirstPTSValid) {
mFirstPTSValid = true;
mFirstTimeUs = timeUs;
}
@@ -1621,10 +1952,13 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+        int64_t targetDiffUs = (mSeekMode == LiveSession::kSeekModeNextSample
+ ? 0 : targetDurationUs);
// Duplicated logic from how we handle .ts playlists.
if (mStartup && mSegmentStartTimeUs >= 0
- && timeUs - mStartTimeUs > targetDurationUs) {
- int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs);
+ && timeUs - mStartTimeUs > targetDiffUs) {
+ int32_t newSeqNumber = getSeqNumberWithAnchorTime(
+ timeUs, targetDiffUs);
if (newSeqNumber >= mSeqNumber) {
--mSeqNumber;
} else {
@@ -1633,24 +1967,18 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
return -EAGAIN;
}
- mStartTimeUsNotify->setInt64("timeUsAudio", timeUs);
- mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);
mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO);
- mStartTimeUsNotify->post();
- mStartTimeUsNotify.clear();
mStartup = false;
}
}
if (mStopParams != NULL) {
- // Queue discontinuity in original stream.
int32_t discontinuitySeq;
int64_t stopTimeUs;
if (!mStopParams->findInt32("discontinuitySeq", &discontinuitySeq)
|| discontinuitySeq > mDiscontinuitySeq
|| !mStopParams->findInt64("timeUsAudio", &stopTimeUs)
|| (discontinuitySeq == mDiscontinuitySeq && unitTimeUs >= stopTimeUs)) {
- packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());
mStreamTypeMask = 0;
mPacketSources.clear();
return ERROR_OUT_OF_RANGE;
@@ -1687,33 +2015,15 @@ void PlaylistFetcher::updateDuration() {
msg->post();
}
-int64_t PlaylistFetcher::resumeThreshold(const sp<AMessage> &msg) {
- int64_t durationUs;
- if (msg->findInt64("durationUs", &durationUs) && durationUs > 0) {
- return kNumSkipFrames * durationUs;
- }
-
- sp<RefBase> obj;
- msg->findObject("format", &obj);
- MetaData *format = static_cast<MetaData *>(obj.get());
-
- const char *mime;
- CHECK(format->findCString(kKeyMIMEType, &mime));
- bool audio = !strncasecmp(mime, "audio/", 6);
- if (audio) {
- // Assumes 1000 samples per frame.
- int32_t sampleRate;
- CHECK(format->findInt32(kKeySampleRate, &sampleRate));
- return kNumSkipFrames /* frames */ * 1000 /* samples */
- * (1000000 / sampleRate) /* sample duration (us) */;
- } else {
- int32_t frameRate;
- if (format->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) {
- return kNumSkipFrames * (1000000 / frameRate);
- }
- }
+void PlaylistFetcher::updateTargetDuration() {
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
- return 500000ll;
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTargetDurationUpdate);
+ msg->setInt64("targetDurationUs", targetDurationUs);
+ msg->post();
}
} // namespace android
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 4e15f85..f64d160 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -36,6 +36,7 @@ class String8;
struct PlaylistFetcher : public AHandler {
static const int64_t kMinBufferedDurationUs;
static const int32_t kDownloadBlockSize;
+ static const int64_t kFetcherResumeThreshold;
enum {
kWhatStarted,
@@ -43,18 +44,22 @@ struct PlaylistFetcher : public AHandler {
kWhatStopped,
kWhatError,
kWhatDurationUpdate,
- kWhatTemporarilyDoneFetching,
+ kWhatTargetDurationUpdate,
kWhatPrepared,
kWhatPreparationFailed,
kWhatStartedAt,
+ kWhatStopReached,
};
PlaylistFetcher(
const sp<AMessage> &notify,
const sp<LiveSession> &session,
const char *uri,
+ int32_t id,
int32_t subtitleGeneration);
+ int32_t getFetcherID() const;
+
sp<DataSource> getDataSource();
void startAsync(
@@ -64,10 +69,10 @@ struct PlaylistFetcher : public AHandler {
int64_t startTimeUs = -1ll, // starting timestamps
int64_t segmentStartTimeUs = -1ll, // starting position within playlist
// startTimeUs!=segmentStartTimeUs only when playlist is live
- int32_t startDiscontinuitySeq = 0,
- bool adaptive = false);
+ int32_t startDiscontinuitySeq = -1,
+ LiveSession::SeekMode seekMode = LiveSession::kSeekModeExactPosition);
- void pauseAsync();
+ void pauseAsync(float thresholdRatio);
void stopAsync(bool clear = true);
@@ -95,6 +100,8 @@ private:
kWhatDownloadNext = 'dlnx',
};
+ struct DownloadState;
+
static const int64_t kMaxMonitorDelayUs;
static const int32_t kNumSkipFrames;
@@ -105,9 +112,12 @@ private:
sp<AMessage> mNotify;
sp<AMessage> mStartTimeUsNotify;
+ sp<HTTPBase> mHTTPDataSource;
sp<LiveSession> mSession;
AString mURI;
+ int32_t mFetcherID;
+
uint32_t mStreamTypeMask;
int64_t mStartTimeUs;
@@ -116,7 +126,7 @@ private:
// adapting or switching tracks.
int64_t mSegmentStartTimeUs;
- ssize_t mDiscontinuitySeq;
+ int32_t mDiscontinuitySeq;
bool mStartTimeUsRelative;
sp<AMessage> mStopParams; // message containing the latest timestamps we should fetch.
@@ -130,13 +140,16 @@ private:
int32_t mSeqNumber;
int32_t mNumRetries;
bool mStartup;
- bool mAdaptive;
- bool mPrepared;
+ bool mIDRFound;
+ int32_t mSeekMode;
+ bool mTimeChangeSignaled;
int64_t mNextPTSTimeUs;
int32_t mMonitorQueueGeneration;
const int32_t mSubtitleGeneration;
+ int32_t mLastDiscontinuitySeq;
+
enum RefreshState {
INITIAL_MINIMUM_RELOAD_DELAY,
FIRST_UNCHANGED_RELOAD_ATTEMPT,
@@ -150,9 +163,8 @@ private:
sp<ATSParser> mTSParser;
bool mFirstPTSValid;
- uint64_t mFirstPTS;
int64_t mFirstTimeUs;
- int64_t mAbsoluteTimeAnchorUs;
+ int64_t mSegmentFirstPTS;
sp<AnotherPacketSource> mVideoBuffer;
// Stores the initialization vector to decrypt the next block of cipher text, which can
@@ -160,6 +172,11 @@ private:
// the last block of cipher text (cipher-block chaining).
unsigned char mAESInitVec[16];
+ Mutex mThresholdLock;
+ float mThresholdRatio;
+
+ sp<DownloadState> mDownloadState;
+
// Set first to true if decrypting the first segment of a playlist segment. When
// first is true, reset the initialization vector based on the available
// information in the manifest; otherwise, use the initialization vector as
@@ -175,6 +192,8 @@ private:
void postMonitorQueue(int64_t delayUs = 0, int64_t minDelayUs = 0);
void cancelMonitorQueue();
+ void setStoppingThreshold(float thresholdRatio);
+ bool shouldPauseDownload();
int64_t delayUsToRefreshPlaylist() const;
status_t refreshPlaylist();
@@ -182,12 +201,19 @@ private:
// Returns the media time in us of the segment specified by seqNumber.
// This is computed by summing the durations of all segments before it.
int64_t getSegmentStartTimeUs(int32_t seqNumber) const;
+ // Returns the duration time in us of the segment specified.
+ int64_t getSegmentDurationUs(int32_t seqNumber) const;
status_t onStart(const sp<AMessage> &msg);
void onPause();
void onStop(const sp<AMessage> &msg);
void onMonitorQueue();
void onDownloadNext();
+ bool initDownloadState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
// Resume a fetcher to continue until the stopping point stored in msg.
status_t onResumeUntil(const sp<AMessage> &msg);
@@ -201,20 +227,19 @@ private:
status_t extractAndQueueAccessUnits(
const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta);
+ void notifyStopReached();
void notifyError(status_t err);
void queueDiscontinuity(
ATSParser::DiscontinuityType type, const sp<AMessage> &extra);
- int32_t getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const;
+ int32_t getSeqNumberWithAnchorTime(
+ int64_t anchorTimeUs, int64_t targetDurationUs) const;
int32_t getSeqNumberForDiscontinuity(size_t discontinuitySeq) const;
int32_t getSeqNumberForTime(int64_t timeUs) const;
void updateDuration();
-
- // Before resuming a fetcher in onResume, check the remaining duration is longer than that
- // returned by resumeThreshold.
- int64_t resumeThreshold(const sp<AMessage> &msg);
+ void updateTargetDuration();
DISALLOW_EVIL_CONSTRUCTORS(PlaylistFetcher);
};
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index 1fe6fcf..3067c3d 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -83,6 +83,8 @@ private:
Vector<SidxEntry> mSidxEntries;
off64_t mMoofOffset;
+ bool mMoofFound;
+ bool mMdatFound;
Vector<PsshInfo> mPssh;
@@ -102,11 +104,15 @@ private:
String8 mLastCommentName;
String8 mLastCommentData;
+ KeyedVector<uint32_t, AString> mMetaKeyMap;
+
status_t readMetaData();
status_t parseChunk(off64_t *offset, int depth);
status_t parseITunesMetaData(off64_t offset, size_t size);
status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);
void parseID3v2MetaData(off64_t offset);
+ status_t parseQTMetaKey(off64_t data_offset, size_t data_size);
+ status_t parseQTMetaVal(int32_t keyId, off64_t data_offset, size_t data_size);
status_t updateAudioTrackInfoFromESDS_MPEG4Audio(
const void *esds_data, size_t esds_size);
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index c270bc1..dafa07e 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -36,6 +36,11 @@ enum {
kAVCProfileCAVLC444Intra = 0x2c
};
+struct NALPosition {
+ size_t nalOffset;
+ size_t nalSize;
+};
+
// Optionally returns sample aspect ratio as well.
void FindAVCDimensions(
const sp<ABuffer> &seqParamSet,
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 482ccff..0a868bc 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -35,6 +35,7 @@
#include <media/stagefright/Utils.h>
#include <media/IStreamSource.h>
#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
#include <inttypes.h>
@@ -47,7 +48,8 @@ namespace android {
static const size_t kTSPacketSize = 188;
struct ATSParser::Program : public RefBase {
- Program(ATSParser *parser, unsigned programNumber, unsigned programMapPID);
+ Program(ATSParser *parser, unsigned programNumber, unsigned programMapPID,
+ int64_t lastRecoveredPTS);
bool parsePSISection(
unsigned pid, ABitReader *br, status_t *err);
@@ -86,14 +88,22 @@ struct ATSParser::Program : public RefBase {
}
private:
+ struct StreamInfo {
+ unsigned mType;
+ unsigned mPID;
+ };
+
ATSParser *mParser;
unsigned mProgramNumber;
unsigned mProgramMapPID;
KeyedVector<unsigned, sp<Stream> > mStreams;
bool mFirstPTSValid;
uint64_t mFirstPTS;
+ int64_t mLastRecoveredPTS;
status_t parseProgramMap(ABitReader *br);
+ int64_t recoverPTS(uint64_t PTS_33bit);
+ bool switchPIDs(const Vector<StreamInfo> &infos);
DISALLOW_EVIL_CONSTRUCTORS(Program);
};
@@ -157,10 +167,12 @@ struct ATSParser::PSISection : public RefBase {
PSISection();
status_t append(const void *data, size_t size);
+ void setSkipBytes(uint8_t skip);
void clear();
bool isComplete() const;
bool isEmpty() const;
+ bool isCRCOkay() const;
const uint8_t *data() const;
size_t size() const;
@@ -170,6 +182,8 @@ protected:
private:
sp<ABuffer> mBuffer;
+ uint8_t mSkipBytes;
+ static uint32_t CRC_TABLE[];
DISALLOW_EVIL_CONSTRUCTORS(PSISection);
};
@@ -177,12 +191,14 @@ private:
////////////////////////////////////////////////////////////////////////////////
ATSParser::Program::Program(
- ATSParser *parser, unsigned programNumber, unsigned programMapPID)
+ ATSParser *parser, unsigned programNumber, unsigned programMapPID,
+ int64_t lastRecoveredPTS)
: mParser(parser),
mProgramNumber(programNumber),
mProgramMapPID(programMapPID),
mFirstPTSValid(false),
- mFirstPTS(0) {
+ mFirstPTS(0),
+ mLastRecoveredPTS(lastRecoveredPTS) {
ALOGV("new program number %u", programNumber);
}
@@ -237,10 +253,71 @@ void ATSParser::Program::signalEOS(status_t finalResult) {
}
}
-struct StreamInfo {
- unsigned mType;
- unsigned mPID;
-};
+bool ATSParser::Program::switchPIDs(const Vector<StreamInfo> &infos) {
+ bool success = false;
+
+ if (mStreams.size() == infos.size()) {
+ // build type->PIDs map for old and new mapping
+ size_t i;
+ KeyedVector<int32_t, Vector<int32_t> > oldType2PIDs, newType2PIDs;
+ for (i = 0; i < mStreams.size(); ++i) {
+ ssize_t index = oldType2PIDs.indexOfKey(mStreams[i]->type());
+ if (index < 0) {
+ oldType2PIDs.add(mStreams[i]->type(), Vector<int32_t>());
+ }
+ oldType2PIDs.editValueFor(mStreams[i]->type()).push_back(mStreams[i]->pid());
+ }
+ for (i = 0; i < infos.size(); ++i) {
+ ssize_t index = newType2PIDs.indexOfKey(infos[i].mType);
+ if (index < 0) {
+ newType2PIDs.add(infos[i].mType, Vector<int32_t>());
+ }
+ newType2PIDs.editValueFor(infos[i].mType).push_back(infos[i].mPID);
+ }
+
+ // we can recover if the number of streams for each type hasn't changed
+ if (oldType2PIDs.size() == newType2PIDs.size()) {
+ success = true;
+ for (i = 0; i < oldType2PIDs.size(); ++i) {
+ // KeyedVector is sorted, we just compare key and size of each index
+ if (oldType2PIDs.keyAt(i) != newType2PIDs.keyAt(i)
+ || oldType2PIDs[i].size() != newType2PIDs[i].size()) {
+ success = false;
+ break;
+ }
+ }
+ }
+
+ if (success) {
+ // save current streams to temp
+ KeyedVector<int32_t, sp<Stream> > temp;
+ for (i = 0; i < mStreams.size(); ++i) {
+ temp.add(mStreams.keyAt(i), mStreams.editValueAt(i));
+ }
+
+ mStreams.clear();
+ for (i = 0; i < temp.size(); ++i) {
+                // The two CHECKs below shouldn't fail; we already verified
+                // above that the per-type stream counts match
+ ssize_t index = newType2PIDs.indexOfKey(temp[i]->type());
+ CHECK(index >= 0);
+ Vector<int32_t> &newPIDs = newType2PIDs.editValueAt(index);
+ CHECK(newPIDs.size() > 0);
+
+ // get the next PID for temp[i]->type() in the new PID map
+ Vector<int32_t>::iterator it = newPIDs.begin();
+
+ // change the PID of the stream, and add it back
+ temp.editValueAt(i)->setPID(*it);
+ mStreams.add(temp[i]->pid(), temp.editValueAt(i));
+
+            // remove the used PID
+ newPIDs.erase(it);
+ }
+ }
+ }
+ return success;
+}
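
switchPIDs() above can recover whenever the number of streams of each type is unchanged, no matter how the PIDs themselves moved. A standalone sketch of that recoverability check, using std::map in place of KeyedVector (illustrative only, with made-up stream types and PIDs; not part of the patch):

    #include <cstdio>
    #include <map>
    #include <vector>

    int main() {
        // type -> PIDs before and after the new PMT
        // (types and PIDs below are made-up example values)
        std::map<int, std::vector<int> > oldPIDs = { {0x1b, {0x100}}, {0x0f, {0x101, 0x102}} };
        std::map<int, std::vector<int> > newPIDs = { {0x1b, {0x200}}, {0x0f, {0x201, 0x202}} };

        // Same criterion as switchPIDs(): the maps must have the same keys and
        // the same number of PIDs per key; the PID values themselves may differ.
        bool recoverable = oldPIDs.size() == newPIDs.size();
        for (auto it = oldPIDs.begin(), jt = newPIDs.begin();
                recoverable && it != oldPIDs.end(); ++it, ++jt) {
            recoverable = it->first == jt->first && it->second.size() == jt->second.size();
        }
        printf("recoverable: %s\n", recoverable ? "yes" : "no");  // prints "yes"
        return 0;
    }
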
status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
unsigned table_id = br->getBits(8);
@@ -369,39 +446,8 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
}
#endif
- // The only case we can recover from is if we have two streams
- // and they switched PIDs.
-
- bool success = false;
-
- if (mStreams.size() == 2 && infos.size() == 2) {
- const StreamInfo &info1 = infos.itemAt(0);
- const StreamInfo &info2 = infos.itemAt(1);
-
- sp<Stream> s1 = mStreams.editValueAt(0);
- sp<Stream> s2 = mStreams.editValueAt(1);
-
- bool caseA =
- info1.mPID == s1->pid() && info1.mType == s2->type()
- && info2.mPID == s2->pid() && info2.mType == s1->type();
-
- bool caseB =
- info1.mPID == s2->pid() && info1.mType == s1->type()
- && info2.mPID == s1->pid() && info2.mType == s2->type();
-
- if (caseA || caseB) {
- unsigned pid1 = s1->pid();
- unsigned pid2 = s2->pid();
- s1->setPID(pid2);
- s2->setPID(pid1);
-
- mStreams.clear();
- mStreams.add(s1->pid(), s1);
- mStreams.add(s2->pid(), s2);
-
- success = true;
- }
- }
+    // we can recover if the number of streams for each type remains the same
+ bool success = switchPIDs(infos);
if (!success) {
ALOGI("Stream PIDs changed and we cannot recover.");
@@ -425,6 +471,32 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
return OK;
}
+int64_t ATSParser::Program::recoverPTS(uint64_t PTS_33bit) {
+ // We only have the lower 33-bit of the PTS. It could overflow within a
+ // reasonable amount of time. To handle the wrap-around, use fancy math
+ // to get an extended PTS that is within [-0xffffffff, 0xffffffff]
+ // of the latest recovered PTS.
+ if (mLastRecoveredPTS < 0ll) {
+ // Use the original 33bit number for 1st frame, the reason is that
+ // if 1st frame wraps to negative that's far away from 0, we could
+ // never start. Only start wrapping around from 2nd frame.
+ mLastRecoveredPTS = static_cast<int64_t>(PTS_33bit);
+ } else {
+ mLastRecoveredPTS = static_cast<int64_t>(
+ ((mLastRecoveredPTS - PTS_33bit + 0x100000000ll)
+ & 0xfffffffe00000000ull) | PTS_33bit);
+ // We start from 0, but recovered PTS could be slightly below 0.
+        // Clamp it to 0 as the rest of the pipeline doesn't take negative pts.
+ // (eg. video is read first and starts at 0, but audio starts at 0xfffffff0)
+ if (mLastRecoveredPTS < 0ll) {
+ ALOGI("Clamping negative recovered PTS (%" PRId64 ") to 0", mLastRecoveredPTS);
+ mLastRecoveredPTS = 0ll;
+ }
+ }
+
+ return mLastRecoveredPTS;
+}
+
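
A minimal standalone sketch of the 33-bit PTS wrap-around arithmetic used by recoverPTS() above; the helper name and the sample values are illustrative only and not part of the patch.

    #include <cstdint>
    #include <cstdio>

    // Pick the extension of a 33-bit PTS that lies within +/-2^32 of the last
    // recovered value, using the same masking arithmetic as recoverPTS().
    static int64_t recover(int64_t lastRecovered, uint64_t pts33) {
        return static_cast<int64_t>(
                ((lastRecovered - pts33 + 0x100000000ll)
                 & 0xfffffffe00000000ull) | pts33);
    }

    int main() {
        int64_t last = 0x1FFFFFFF0ll;   // just below the 33-bit wrap point
        uint64_t wrapped = 0x10ull;     // next PTS, already wrapped around
        // Prints 8589934608 (0x200000010), i.e. slightly after 'last'
        // instead of a value near 0.
        printf("%lld\n", (long long)recover(last, wrapped));
        return 0;
    }
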
sp<MediaSource> ATSParser::Program::getSource(SourceType type) {
size_t index = (type == AUDIO) ? 0 : 0;
@@ -455,6 +527,8 @@ bool ATSParser::Program::hasSource(SourceType type) const {
}
int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) {
+ PTS = recoverPTS(PTS);
+
if (!(mParser->mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)) {
if (!mFirstPTSValid) {
mFirstPTSValid = true;
@@ -969,6 +1043,7 @@ ATSParser::ATSParser(uint32_t flags)
mAbsoluteTimeAnchorUs(-1ll),
mTimeOffsetValid(false),
mTimeOffsetUs(0ll),
+ mLastRecoveredPTS(-1ll),
mNumTSPacketsParsed(0),
mNumPCRs(0) {
mPSISections.add(0 /* PID */, new PSISection);
@@ -987,11 +1062,21 @@ status_t ATSParser::feedTSPacket(const void *data, size_t size) {
void ATSParser::signalDiscontinuity(
DiscontinuityType type, const sp<AMessage> &extra) {
int64_t mediaTimeUs;
- if ((type & DISCONTINUITY_TIME)
- && extra != NULL
- && extra->findInt64(
- IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) {
- mAbsoluteTimeAnchorUs = mediaTimeUs;
+ if ((type & DISCONTINUITY_TIME) && extra != NULL) {
+ if (extra->findInt64(IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) {
+ mAbsoluteTimeAnchorUs = mediaTimeUs;
+ }
+ if ((mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)
+ && extra->findInt64(
+ IStreamListener::kKeyRecentMediaTimeUs, &mediaTimeUs)) {
+ if (mAbsoluteTimeAnchorUs >= 0ll) {
+ mediaTimeUs -= mAbsoluteTimeAnchorUs;
+ }
+ if (mTimeOffsetValid) {
+ mediaTimeUs -= mTimeOffsetUs;
+ }
+ mLastRecoveredPTS = (mediaTimeUs * 9) / 100;
+ }
} else if (type == DISCONTINUITY_ABSOLUTE_TIME) {
int64_t timeUs;
CHECK(extra->findInt64("timeUs", &timeUs));
@@ -1075,7 +1160,7 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) {
if (!found) {
mPrograms.push(
- new Program(this, program_number, programMapPID));
+ new Program(this, program_number, programMapPID, mLastRecoveredPTS));
}
if (mPSISections.indexOfKey(programMapPID) < 0) {
@@ -1098,10 +1183,12 @@ status_t ATSParser::parsePID(
if (payload_unit_start_indicator) {
if (!section->isEmpty()) {
- return ERROR_UNSUPPORTED;
+ ALOGW("parsePID encounters payload_unit_start_indicator when section is not empty");
+ section->clear();
}
unsigned skip = br->getBits(8);
+ section->setSkipBytes(skip + 1); // skip filler bytes + pointer field itself
br->skipBits(skip * 8);
}
@@ -1116,6 +1203,9 @@ status_t ATSParser::parsePID(
return OK;
}
+ if (!section->isCRCOkay()) {
+ return BAD_VALUE;
+ }
ABitReader sectionBits(section->data(), section->size());
if (PID == 0) {
@@ -1338,7 +1428,79 @@ void ATSParser::updatePCR(
////////////////////////////////////////////////////////////////////////////////
-ATSParser::PSISection::PSISection() {
+
+// CRC32 used for PSI section. The table was generated by following command:
+// $ python pycrc.py --model crc-32-mpeg --algorithm table-driven --generate c
+// Visit http://www.tty1.net/pycrc/index_en.html for more details.
+uint32_t ATSParser::PSISection::CRC_TABLE[] = {
+ 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
+ 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
+ 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
+ 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
+ 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
+ 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
+ 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
+ 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
+ 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
+ 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
+ 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
+ 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
+ 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
+ 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
+ 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
+ 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
+ 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
+ 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
+ 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
+ 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
+ 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
+ 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
+ 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
+ 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
+ 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
+ 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
+ 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
+ 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
+ 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
+ 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
+ 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
+ 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
+ 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
+ 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
+ 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
+ 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
+ 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
+ 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
+ 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
+ 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
+ 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
+ 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
+ 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
+ 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
+ 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
+ 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
+ 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
+ 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
+ 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
+ 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
+ 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
+ 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
+ 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
+ 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
+ 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
+ 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
+ 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
+ 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
+ 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
+ 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
+ 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
+ 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
+ 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
+ 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
+ };
+
+ATSParser::PSISection::PSISection() :
+ mSkipBytes(0) {
}
ATSParser::PSISection::~PSISection() {
@@ -1369,10 +1531,15 @@ status_t ATSParser::PSISection::append(const void *data, size_t size) {
return OK;
}
+void ATSParser::PSISection::setSkipBytes(uint8_t skip) {
+ mSkipBytes = skip;
+}
+
void ATSParser::PSISection::clear() {
if (mBuffer != NULL) {
mBuffer->setRange(0, 0);
}
+ mSkipBytes = 0;
}
bool ATSParser::PSISection::isComplete() const {
@@ -1396,4 +1563,30 @@ size_t ATSParser::PSISection::size() const {
return mBuffer == NULL ? 0 : mBuffer->size();
}
+bool ATSParser::PSISection::isCRCOkay() const {
+ if (!isComplete()) {
+ return false;
+ }
+ uint8_t* data = mBuffer->data();
+
+ // Return true if section_syntax_indicator says no section follows the field section_length.
+ if ((data[1] & 0x80) == 0) {
+ return true;
+ }
+
+ unsigned sectionLength = U16_AT(data + 1) & 0xfff;
+ ALOGV("sectionLength %u, skip %u", sectionLength, mSkipBytes);
+
+    // Skip the preceding pointer field and filler bytes present when the payload unit start indicator is on.
+ sectionLength -= mSkipBytes;
+
+ uint32_t crc = 0xffffffff;
+    for (unsigned i = 0; i < sectionLength + 4 /* crc */; i++) {
+ uint8_t b = data[i];
+ int index = ((crc >> 24) ^ (b & 0xff)) & 0xff;
+ crc = CRC_TABLE[index] ^ (crc << 8);
+ }
+ ALOGV("crc: %08x\n", crc);
+ return (crc == 0);
+}
} // namespace android
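
The isCRCOkay() check added above relies on a standard property of CRC-32/MPEG-2: computing the CRC over a PSI section together with its trailing CRC_32 field yields zero. A bit-wise standalone sketch of that property follows; it is illustrative only (the patch uses the table-driven form), and the section bytes are arbitrary example data.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Bit-wise CRC-32/MPEG-2: init 0xffffffff, poly 0x04c11db7, no reflection,
    // no final XOR; equivalent to the table-driven loop in isCRCOkay().
    static uint32_t crc32mpeg2(const uint8_t *data, size_t size) {
        uint32_t crc = 0xffffffff;
        for (size_t i = 0; i < size; ++i) {
            crc ^= static_cast<uint32_t>(data[i]) << 24;
            for (int b = 0; b < 8; ++b) {
                crc = (crc & 0x80000000) ? (crc << 1) ^ 0x04c11db7 : (crc << 1);
            }
        }
        return crc;
    }

    int main() {
        // Arbitrary example section body (its contents don't matter for the property).
        std::vector<uint8_t> section = {0x00, 0xb0, 0x0d, 0x00, 0x01,
                                        0xc1, 0x00, 0x00, 0x00, 0x01, 0xe0, 0x20};
        uint32_t crc = crc32mpeg2(section.data(), section.size());
        for (int shift = 24; shift >= 0; shift -= 8) {
            section.push_back((crc >> shift) & 0xff);  // append the CRC_32 field
        }
        // Prints 00000000, which is exactly what isCRCOkay() tests for.
        printf("crc over section+crc = %08x\n", crc32mpeg2(section.data(), section.size()));
        return 0;
    }
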
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 75d76dc..a1405bd 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -46,6 +46,9 @@ struct ATSParser : public RefBase {
DISCONTINUITY_AUDIO_FORMAT
| DISCONTINUITY_VIDEO_FORMAT
| DISCONTINUITY_TIME,
+ DISCONTINUITY_FORMAT_ONLY =
+ DISCONTINUITY_AUDIO_FORMAT
+ | DISCONTINUITY_VIDEO_FORMAT,
};
enum Flags {
@@ -115,6 +118,7 @@ private:
bool mTimeOffsetValid;
int64_t mTimeOffsetUs;
+ int64_t mLastRecoveredPTS;
size_t mNumTSPacketsParsed;
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index f266fe7..a4f8739 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -19,6 +19,8 @@
#include "AnotherPacketSource.h"
+#include "include/avc_utils.h"
+
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -27,6 +29,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
#include <utils/Vector.h>
#include <inttypes.h>
@@ -38,6 +41,7 @@ const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs
AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
: mIsAudio(false),
mIsVideo(false),
+ mEnabled(true),
mFormat(NULL),
mLastQueuedTimeUs(0),
mEOSResult(OK),
@@ -48,7 +52,10 @@ AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
}
void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
- CHECK(mFormat == NULL);
+ if (mFormat != NULL) {
+ // Only allowed to be set once. Requires explicit clear to reset.
+ return;
+ }
mIsAudio = false;
mIsVideo = false;
@@ -91,13 +98,12 @@ sp<MetaData> AnotherPacketSource::getFormat() {
while (it != mBuffers.end()) {
sp<ABuffer> buffer = *it;
int32_t discontinuity;
- if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
- break;
- }
-
- sp<RefBase> object;
- if (buffer->meta()->findObject("format", &object)) {
- return mFormat = static_cast<MetaData*>(object.get());
+ if (!buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ sp<RefBase> object;
+ if (buffer->meta()->findObject("format", &object)) {
+ setFormat(static_cast<MetaData*>(object.get()));
+ return mFormat;
+ }
}
++it;
@@ -131,7 +137,7 @@ status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
sp<RefBase> object;
if ((*buffer)->meta()->findObject("format", &object)) {
- mFormat = static_cast<MetaData*>(object.get());
+ setFormat(static_cast<MetaData*>(object.get()));
}
return OK;
@@ -153,7 +159,6 @@ status_t AnotherPacketSource::read(
const sp<ABuffer> buffer = *mBuffers.begin();
mBuffers.erase(mBuffers.begin());
- mLatestDequeuedMeta = buffer->meta()->dup();
int32_t discontinuity;
if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
@@ -164,9 +169,11 @@ status_t AnotherPacketSource::read(
return INFO_DISCONTINUITY;
}
+ mLatestDequeuedMeta = buffer->meta()->dup();
+
sp<RefBase> object;
if (buffer->meta()->findObject("format", &object)) {
- mFormat = static_cast<MetaData*>(object.get());
+ setFormat(static_cast<MetaData*>(object.get()));
}
int64_t timeUs;
@@ -176,6 +183,11 @@ status_t AnotherPacketSource::read(
mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
+ int32_t isSync;
+ if (buffer->meta()->findInt32("isSync", &isSync)) {
+ mediaBuffer->meta_data()->setInt32(kKeyIsSyncFrame, isSync);
+ }
+
*out = mediaBuffer;
return OK;
}
@@ -203,20 +215,26 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
return;
}
- int64_t lastQueuedTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
- mLastQueuedTimeUs = lastQueuedTimeUs;
- ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
-
Mutex::Autolock autoLock(mLock);
mBuffers.push_back(buffer);
mCondition.signal();
int32_t discontinuity;
if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ // discontinuity handling needs to be consistent with queueDiscontinuity()
++mQueuedDiscontinuityCount;
+ mLastQueuedTimeUs = 0ll;
+ mEOSResult = OK;
+ mLatestEnqueuedMeta = NULL;
+ return;
}
+ int64_t lastQueuedTimeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
+ mLastQueuedTimeUs = lastQueuedTimeUs;
+ ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)",
+ mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
+
if (mLatestEnqueuedMeta == NULL) {
mLatestEnqueuedMeta = buffer->meta()->dup();
} else {
@@ -296,6 +314,10 @@ void AnotherPacketSource::signalEOS(status_t result) {
bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {
Mutex::Autolock autoLock(mLock);
+ *finalResult = OK;
+ if (!mEnabled) {
+ return false;
+ }
if (!mBuffers.empty()) {
return true;
}
@@ -304,6 +326,24 @@ bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {
return false;
}
+bool AnotherPacketSource::hasDataBufferAvailable(status_t *finalResult) {
+ Mutex::Autolock autoLock(mLock);
+ *finalResult = OK;
+ if (!mEnabled) {
+ return false;
+ }
+ List<sp<ABuffer> >::iterator it;
+ for (it = mBuffers.begin(); it != mBuffers.end(); it++) {
+ int32_t discontinuity;
+ if (!(*it)->meta()->findInt32("discontinuity", &discontinuity)) {
+ return true;
+ }
+ }
+
+ *finalResult = mEOSResult;
+ return false;
+}
+
int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) {
Mutex::Autolock autoLock(mLock);
return getBufferedDurationUs_l(finalResult);
@@ -320,10 +360,15 @@ int64_t AnotherPacketSource::getBufferedDurationUs_l(status_t *finalResult) {
int64_t time2 = -1;
int64_t durationUs = 0;
- List<sp<ABuffer> >::iterator it = mBuffers.begin();
- while (it != mBuffers.end()) {
+ List<sp<ABuffer> >::iterator it;
+ for (it = mBuffers.begin(); it != mBuffers.end(); it++) {
const sp<ABuffer> &buffer = *it;
+ int32_t discard;
+ if (buffer->meta()->findInt32("discard", &discard) && discard) {
+ continue;
+ }
+
int64_t timeUs;
if (buffer->meta()->findInt64("timeUs", &timeUs)) {
if (time1 < 0 || timeUs < time1) {
@@ -338,8 +383,6 @@ int64_t AnotherPacketSource::getBufferedDurationUs_l(status_t *finalResult) {
durationUs += time2 - time1;
time1 = time2 = -1;
}
-
- ++it;
}
return durationUs + (time2 - time1);
@@ -358,11 +401,19 @@ int64_t AnotherPacketSource::getEstimatedDurationUs() {
return getBufferedDurationUs_l(&finalResult);
}
- List<sp<ABuffer> >::iterator it = mBuffers.begin();
- sp<ABuffer> buffer = *it;
+ sp<ABuffer> buffer;
+ int32_t discard;
+ int64_t startTimeUs = -1ll;
+ List<sp<ABuffer> >::iterator it;
+ for (it = mBuffers.begin(); it != mBuffers.end(); it++) {
+ buffer = *it;
+ if (buffer->meta()->findInt32("discard", &discard) && discard) {
+ continue;
+ }
+ buffer->meta()->findInt64("timeUs", &startTimeUs);
+ break;
+ }
- int64_t startTimeUs;
- buffer->meta()->findInt64("timeUs", &startTimeUs);
if (startTimeUs < 0) {
return 0;
}
@@ -422,4 +473,152 @@ sp<AMessage> AnotherPacketSource::getLatestDequeuedMeta() {
return mLatestDequeuedMeta;
}
+void AnotherPacketSource::enable(bool enable) {
+ Mutex::Autolock autoLock(mLock);
+ mEnabled = enable;
+}
+
+/*
+ * returns the sample meta that's delayUs after queue head
+ * (NULL if such sample is unavailable)
+ */
+sp<AMessage> AnotherPacketSource::getMetaAfterLastDequeued(int64_t delayUs) {
+ Mutex::Autolock autoLock(mLock);
+ int64_t firstUs = -1;
+ int64_t lastUs = -1;
+ int64_t durationUs = 0;
+
+ List<sp<ABuffer> >::iterator it;
+ for (it = mBuffers.begin(); it != mBuffers.end(); ++it) {
+ const sp<ABuffer> &buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ durationUs += lastUs - firstUs;
+ firstUs = -1;
+ lastUs = -1;
+ continue;
+ }
+ int64_t timeUs;
+ if (buffer->meta()->findInt64("timeUs", &timeUs)) {
+ if (firstUs < 0) {
+ firstUs = timeUs;
+ }
+ if (lastUs < 0 || timeUs > lastUs) {
+ lastUs = timeUs;
+ }
+ if (durationUs + (lastUs - firstUs) >= delayUs) {
+ return buffer->meta();
+ }
+ }
+ }
+ return NULL;
+}
+
+/*
+ * removes samples with time equal or after meta
+ */
+void AnotherPacketSource::trimBuffersAfterMeta(
+ const sp<AMessage> &meta) {
+ if (meta == NULL) {
+ ALOGW("trimming with NULL meta, ignoring");
+ return;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ if (mBuffers.empty()) {
+ return;
+ }
+
+ HLSTime stopTime(meta);
+ ALOGV("trimBuffersAfterMeta: discontinuitySeq %d, timeUs %lld",
+ stopTime.mSeq, (long long)stopTime.mTimeUs);
+
+ List<sp<ABuffer> >::iterator it;
+ sp<AMessage> newLatestEnqueuedMeta = NULL;
+ int64_t newLastQueuedTimeUs = 0;
+ size_t newDiscontinuityCount = 0;
+ for (it = mBuffers.begin(); it != mBuffers.end(); ++it) {
+ const sp<ABuffer> &buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ newDiscontinuityCount++;
+ continue;
+ }
+
+ HLSTime curTime(buffer->meta());
+ if (!(curTime < stopTime)) {
+ ALOGV("trimming from %lld (inclusive) to end",
+ (long long)curTime.mTimeUs);
+ break;
+ }
+ newLatestEnqueuedMeta = buffer->meta();
+ newLastQueuedTimeUs = curTime.mTimeUs;
+ }
+ mBuffers.erase(it, mBuffers.end());
+ mLatestEnqueuedMeta = newLatestEnqueuedMeta;
+ mLastQueuedTimeUs = newLastQueuedTimeUs;
+ mQueuedDiscontinuityCount = newDiscontinuityCount;
+}
+
+/*
+ * removes samples with time equal to or before meta;
+ * returns first sample left in the queue.
+ *
+ * (for AVC, if trim happens, the samples left will always start
+ * at next IDR.)
+ */
+sp<AMessage> AnotherPacketSource::trimBuffersBeforeMeta(
+ const sp<AMessage> &meta) {
+ HLSTime startTime(meta);
+ ALOGV("trimBuffersBeforeMeta: discontinuitySeq %d, timeUs %lld",
+ startTime.mSeq, (long long)startTime.mTimeUs);
+
+ sp<AMessage> firstMeta;
+ Mutex::Autolock autoLock(mLock);
+ if (mBuffers.empty()) {
+ return NULL;
+ }
+
+ sp<MetaData> format;
+ bool isAvc = false;
+
+ List<sp<ABuffer> >::iterator it;
+ size_t discontinuityCount = 0;
+ for (it = mBuffers.begin(); it != mBuffers.end(); ++it) {
+ const sp<ABuffer> &buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ format = NULL;
+ isAvc = false;
+ discontinuityCount++;
+ continue;
+ }
+ if (format == NULL) {
+ sp<RefBase> object;
+ if (buffer->meta()->findObject("format", &object)) {
+ const char* mime;
+ format = static_cast<MetaData*>(object.get());
+ isAvc = format != NULL
+ && format->findCString(kKeyMIMEType, &mime)
+ && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+ }
+ }
+ if (isAvc && !IsIDR(buffer)) {
+ continue;
+ }
+
+ HLSTime curTime(buffer->meta());
+ if (startTime < curTime) {
+ ALOGV("trimming from beginning to %lld (not inclusive)",
+ (long long)curTime.mTimeUs);
+ firstMeta = buffer->meta();
+ break;
+ }
+ }
+ mBuffers.erase(mBuffers.begin(), it);
+ mQueuedDiscontinuityCount -= discontinuityCount;
+ mLatestDequeuedMeta = NULL;
+ return firstMeta;
+}
+
} // namespace android
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index 809a858..fa7dd6a 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -43,8 +43,12 @@ struct AnotherPacketSource : public MediaSource {
void clear();
+ // Returns true if we have any packets including discontinuities
bool hasBufferAvailable(status_t *finalResult);
+    // Returns true if we have packets that are not discontinuities
+ bool hasDataBufferAvailable(status_t *finalResult);
+
// Returns the difference between the last and the first queued
// presentation timestamps since the last discontinuity (if any).
int64_t getBufferedDurationUs(status_t *finalResult);
@@ -66,8 +70,14 @@ struct AnotherPacketSource : public MediaSource {
bool isFinished(int64_t duration) const;
+ void enable(bool enable);
+
sp<AMessage> getLatestEnqueuedMeta();
sp<AMessage> getLatestDequeuedMeta();
+ sp<AMessage> getMetaAfterLastDequeued(int64_t delayUs);
+
+ void trimBuffersAfterMeta(const sp<AMessage> &meta);
+ sp<AMessage> trimBuffersBeforeMeta(const sp<AMessage> &meta);
protected:
virtual ~AnotherPacketSource();
@@ -78,6 +88,7 @@ private:
bool mIsAudio;
bool mIsVideo;
+ bool mEnabled;
sp<MetaData> mFormat;
int64_t mLastQueuedTimeUs;
List<sp<ABuffer> > mBuffers;
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index f257289..a279049 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -533,6 +533,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAC3() {
int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
CHECK_GE(timeUs, 0ll);
accessUnit->meta()->setInt64("timeUs", timeUs);
+ accessUnit->meta()->setInt32("isSync", 1);
memmove(
mBuffer->data(),
@@ -582,6 +583,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() {
int64_t timeUs = fetchTimestamp(payloadSize + 4);
CHECK_GE(timeUs, 0ll);
accessUnit->meta()->setInt64("timeUs", timeUs);
+ accessUnit->meta()->setInt32("isSync", 1);
int16_t *ptr = (int16_t *)accessUnit->data();
for (size_t i = 0; i < payloadSize / sizeof(int16_t); ++i) {
@@ -617,8 +619,6 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
// having to interpolate.
// The final AAC frame may well extend into the next RangeInfo but
// that's ok.
- // TODO: the logic commented above is skipped because codec cannot take
- // arbitrary sized input buffers;
size_t offset = 0;
while (offset < info.mLength) {
if (offset + 7 > mBuffer->size()) {
@@ -683,12 +683,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
size_t headerSize __unused = protection_absent ? 7 : 9;
offset += aac_frame_length;
- // TODO: move back to concatenation when codec can support arbitrary input buffers.
- // For now only queue a single buffer
- break;
}
- int64_t timeUs = fetchTimestampAAC(offset);
+ int64_t timeUs = fetchTimestamp(offset);
sp<ABuffer> accessUnit = new ABuffer(offset);
memcpy(accessUnit->data(), mBuffer->data(), offset);
@@ -698,6 +695,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
mBuffer->setRange(0, mBuffer->size() - offset);
accessUnit->meta()->setInt64("timeUs", timeUs);
+ accessUnit->meta()->setInt32("isSync", 1);
return accessUnit;
}
@@ -735,50 +733,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
return timeUs;
}
-// TODO: avoid interpolating timestamps once codec supports arbitrary sized input buffers
-int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) {
- int64_t timeUs = -1;
- bool first = true;
-
- size_t samplesize = size;
- while (size > 0) {
- CHECK(!mRangeInfos.empty());
-
- RangeInfo *info = &*mRangeInfos.begin();
-
- if (first) {
- timeUs = info->mTimestampUs;
- first = false;
- }
-
- if (info->mLength > size) {
- int32_t sampleRate;
- CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate));
- info->mLength -= size;
- size_t numSamples = 1024 * size / samplesize;
- info->mTimestampUs += numSamples * 1000000ll / sampleRate;
- size = 0;
- } else {
- size -= info->mLength;
-
- mRangeInfos.erase(mRangeInfos.begin());
- info = NULL;
- }
-
- }
-
- if (timeUs == 0ll) {
- ALOGV("Returning 0 timestamp");
- }
-
- return timeUs;
-}
-
-struct NALPosition {
- size_t nalOffset;
- size_t nalSize;
-};
-
sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
const uint8_t *data = mBuffer->data();
@@ -786,11 +740,13 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
Vector<NALPosition> nals;
size_t totalSize = 0;
+ size_t seiCount = 0;
status_t err;
const uint8_t *nalStart;
size_t nalSize;
bool foundSlice = false;
+ bool foundIDR = false;
while ((err = getNextNALUnit(&data, &size, &nalStart, &nalSize)) == OK) {
if (nalSize == 0) continue;
@@ -798,6 +754,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
bool flush = false;
if (nalType == 1 || nalType == 5) {
+ if (nalType == 5) {
+ foundIDR = true;
+ }
if (foundSlice) {
ABitReader br(nalStart + 1, nalSize);
unsigned first_mb_in_slice = parseUE(&br);
@@ -815,6 +774,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
// next frame.
flush = true;
+ } else if (nalType == 6 && nalSize > 0) {
+ // found non-zero sized SEI
+ ++seiCount;
}
if (flush) {
@@ -823,21 +785,29 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
size_t auSize = 4 * nals.size() + totalSize;
sp<ABuffer> accessUnit = new ABuffer(auSize);
+ sp<ABuffer> sei;
+
+ if (seiCount > 0) {
+ sei = new ABuffer(seiCount * sizeof(NALPosition));
+ accessUnit->meta()->setBuffer("sei", sei);
+ }
#if !LOG_NDEBUG
AString out;
#endif
size_t dstOffset = 0;
+ size_t seiIndex = 0;
for (size_t i = 0; i < nals.size(); ++i) {
const NALPosition &pos = nals.itemAt(i);
unsigned nalType = mBuffer->data()[pos.nalOffset] & 0x1f;
- if (nalType == 6) {
- sp<ABuffer> sei = new ABuffer(pos.nalSize);
- memcpy(sei->data(), mBuffer->data() + pos.nalOffset, pos.nalSize);
- accessUnit->meta()->setBuffer("sei", sei);
+ if (nalType == 6 && pos.nalSize > 0) {
+ CHECK_LT(seiIndex, sei->size() / sizeof(NALPosition));
+ NALPosition &seiPos = ((NALPosition *)sei->data())[seiIndex++];
+ seiPos.nalOffset = dstOffset + 4;
+ seiPos.nalSize = pos.nalSize;
}
#if !LOG_NDEBUG
@@ -875,6 +845,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
CHECK_GE(timeUs, 0ll);
accessUnit->meta()->setInt64("timeUs", timeUs);
+ if (foundIDR) {
+ accessUnit->meta()->setInt32("isSync", 1);
+ }
if (mFormat == NULL) {
mFormat = MakeAVCCodecSpecificData(accessUnit);
@@ -931,6 +904,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {
CHECK_GE(timeUs, 0ll);
accessUnit->meta()->setInt64("timeUs", timeUs);
+ accessUnit->meta()->setInt32("isSync", 1);
if (mFormat == NULL) {
mFormat = new MetaData;
@@ -1007,6 +981,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {
int pprevStartCode = -1;
int prevStartCode = -1;
int currentStartCode = -1;
+ bool gopFound = false;
+ bool isClosedGop = false;
+ bool brokenLink = false;
size_t offset = 0;
while (offset + 3 < size) {
@@ -1069,6 +1046,13 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {
}
}
+ if (mFormat != NULL && currentStartCode == 0xb8) {
+ // GOP layer
+ gopFound = true;
+ isClosedGop = (data[offset + 7] & 0x40) != 0;
+ brokenLink = (data[offset + 7] & 0x20) != 0;
+ }
+
if (mFormat != NULL && currentStartCode == 0x00) {
// Picture start
@@ -1090,6 +1074,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {
offset = 0;
accessUnit->meta()->setInt64("timeUs", timeUs);
+ if (gopFound && (!brokenLink || isClosedGop)) {
+ accessUnit->meta()->setInt32("isSync", 1);
+ }
ALOGV("returning MPEG video access unit at time %" PRId64 " us",
timeUs);
@@ -1234,6 +1221,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {
case SKIP_TO_VOP_START:
{
if (chunkType == 0xb6) {
+ int vopCodingType = (data[offset + 4] & 0xc0) >> 6;
+
offset += chunkSize;
sp<ABuffer> accessUnit = new ABuffer(offset);
@@ -1249,6 +1238,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {
offset = 0;
accessUnit->meta()->setInt64("timeUs", timeUs);
+ if (vopCodingType == 0) { // intra-coded VOP
+ accessUnit->meta()->setInt32("isSync", 1);
+ }
ALOGV("returning MPEG4 video access unit at time %" PRId64 " us",
timeUs);
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h
index eb4b1c9..45b4624 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.h
+++ b/media/libstagefright/mpeg2ts/ESQueue.h
@@ -77,7 +77,6 @@ private:
// consume a logical (compressed) access unit of size "size",
// returns its timestamp in us (or -1 if no time information).
int64_t fetchTimestamp(size_t size);
- int64_t fetchTimestampAAC(size_t size);
DISALLOW_EVIL_CONSTRUCTORS(ElementaryStreamQueue);
};
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index aaa8334..07ea605 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -1,11 +1,8 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
-ifeq ($(TARGET_DEVICE), manta)
- LOCAL_CFLAGS += -DSURFACE_IS_BGR32
-endif
-
LOCAL_SRC_FILES:= \
+ FrameDropper.cpp \
GraphicBufferSource.cpp \
OMX.cpp \
OMXMaster.cpp \
diff --git a/media/libstagefright/omx/FrameDropper.cpp b/media/libstagefright/omx/FrameDropper.cpp
new file mode 100644
index 0000000..9fba0b7
--- /dev/null
+++ b/media/libstagefright/omx/FrameDropper.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameDropper"
+#include <utils/Log.h>
+
+#include "FrameDropper.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+static const int64_t kMaxJitterUs = 2000;
+
+FrameDropper::FrameDropper()
+ : mDesiredMinTimeUs(-1),
+ mMinIntervalUs(0) {
+}
+
+FrameDropper::~FrameDropper() {
+}
+
+status_t FrameDropper::setMaxFrameRate(float maxFrameRate) {
+ if (maxFrameRate <= 0) {
+ ALOGE("framerate should be positive but got %f.", maxFrameRate);
+ return BAD_VALUE;
+ }
+ mMinIntervalUs = (int64_t) (1000000.0f / maxFrameRate);
+ return OK;
+}
+
+bool FrameDropper::shouldDrop(int64_t timeUs) {
+ if (mMinIntervalUs <= 0) {
+ return false;
+ }
+
+ if (mDesiredMinTimeUs < 0) {
+ mDesiredMinTimeUs = timeUs + mMinIntervalUs;
+ ALOGV("first frame %lld, next desired frame %lld", timeUs, mDesiredMinTimeUs);
+ return false;
+ }
+
+ if (timeUs < (mDesiredMinTimeUs - kMaxJitterUs)) {
+ ALOGV("drop frame %lld, desired frame %lld, diff %lld",
+ timeUs, mDesiredMinTimeUs, mDesiredMinTimeUs - timeUs);
+ return true;
+ }
+
+ int64_t n = (timeUs - mDesiredMinTimeUs + kMaxJitterUs) / mMinIntervalUs;
+ mDesiredMinTimeUs += (n + 1) * mMinIntervalUs;
+ ALOGV("keep frame %lld, next desired frame %lld, diff %lld",
+ timeUs, mDesiredMinTimeUs, mDesiredMinTimeUs - timeUs);
+ return false;
+}
+
+} // namespace android
diff --git a/media/libstagefright/omx/FrameDropper.h b/media/libstagefright/omx/FrameDropper.h
new file mode 100644
index 0000000..c5a6d4b
--- /dev/null
+++ b/media/libstagefright/omx/FrameDropper.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_DROPPER_H_
+
+#define FRAME_DROPPER_H_
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct FrameDropper : public RefBase {
+ // No frames will be dropped until a valid max frame rate is set.
+ FrameDropper();
+
+ // maxFrameRate must be positive.
+ status_t setMaxFrameRate(float maxFrameRate);
+
+ // Returns false if max frame rate has not been set via setMaxFrameRate.
+ bool shouldDrop(int64_t timeUs);
+
+protected:
+ virtual ~FrameDropper();
+
+private:
+ int64_t mDesiredMinTimeUs;
+ int64_t mMinIntervalUs;
+
+ DISALLOW_EVIL_CONSTRUCTORS(FrameDropper);
+};
+
+} // namespace android
+
+#endif // FRAME_DROPPER_H_
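
For orientation, a minimal usage sketch of the interface above (hypothetical helper code, not part of the patch; the real in-tree caller is GraphicBufferSource::setMaxFps() further down in this change):

    #include "FrameDropper.h"

    using namespace android;

    // Hypothetical helper: build a dropper that caps an arbitrary stream at 24 fps.
    static sp<FrameDropper> makeDropper() {
        sp<FrameDropper> dropper = new FrameDropper();
        if (dropper->setMaxFrameRate(24.0f) != OK) {  // <= 0 is rejected with BAD_VALUE
            dropper.clear();
        }
        return dropper;
    }

    // Hypothetical per-frame hook: returns true if the frame should be forwarded.
    static bool keepFrame(const sp<FrameDropper> &dropper, int64_t timeUs) {
        // shouldDrop() always returns false until setMaxFrameRate() has succeeded.
        return dropper == NULL || !dropper->shouldDrop(timeUs);
    }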
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 2945644..477cfc6 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -31,6 +31,7 @@
#include <gui/BufferItem.h>
#include <inttypes.h>
+#include "FrameDropper.h"
namespace android {
@@ -54,9 +55,9 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
mRepeatAfterUs(-1ll),
mRepeatLastFrameGeneration(0),
mRepeatLastFrameTimestamp(-1ll),
- mLatestSubmittedBufferId(-1),
- mLatestSubmittedBufferFrameNum(0),
- mLatestSubmittedBufferUseCount(0),
+ mLatestBufferId(-1),
+ mLatestBufferFrameNum(0),
+ mLatestBufferUseCount(0),
mRepeatBufferDeferred(false),
mTimePerCaptureUs(-1ll),
mTimePerFrameUs(-1ll),
@@ -153,9 +154,9 @@ void GraphicBufferSource::omxExecuting() {
mLooper->registerHandler(mReflector);
mLooper->start();
- if (mLatestSubmittedBufferId >= 0) {
+ if (mLatestBufferId >= 0) {
sp<AMessage> msg =
- new AMessage(kWhatRepeatLastFrame, mReflector->id());
+ new AMessage(kWhatRepeatLastFrame, mReflector);
msg->setInt32("generation", ++mRepeatLastFrameGeneration);
msg->post(mRepeatAfterUs);
@@ -288,8 +289,8 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
ALOGV("cbi %d matches bq slot %d, handle=%p",
cbi, id, mBufferSlot[id]->handle);
- if (id == mLatestSubmittedBufferId) {
- CHECK_GT(mLatestSubmittedBufferUseCount--, 0);
+ if (id == mLatestBufferId) {
+ CHECK_GT(mLatestBufferUseCount--, 0);
} else {
mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
@@ -314,11 +315,11 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
ALOGV("buffer freed, EOS pending");
submitEndOfInputStream_l();
} else if (mRepeatBufferDeferred) {
- bool success = repeatLatestSubmittedBuffer_l();
+ bool success = repeatLatestBuffer_l();
if (success) {
- ALOGV("deferred repeatLatestSubmittedBuffer_l SUCCESS");
+ ALOGV("deferred repeatLatestBuffer_l SUCCESS");
} else {
- ALOGV("deferred repeatLatestSubmittedBuffer_l FAILURE");
+ ALOGV("deferred repeatLatestBuffer_l FAILURE");
}
mRepeatBufferDeferred = false;
}
@@ -383,12 +384,12 @@ void GraphicBufferSource::suspend(bool suspend) {
mSuspended = false;
if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) {
- if (repeatLatestSubmittedBuffer_l()) {
- ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l SUCCESS");
+ if (repeatLatestBuffer_l()) {
+ ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");
mRepeatBufferDeferred = false;
} else {
- ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l FAILURE");
+ ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");
}
}
}
@@ -442,12 +443,22 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
// only submit sample if start time is unspecified, or sample
// is queued after the specified start time
+ bool dropped = false;
if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {
// if start time is set, offset time stamp by start time
if (mSkipFramesBeforeNs > 0) {
item.mTimestamp -= mSkipFramesBeforeNs;
}
- err = submitBuffer_l(item, cbi);
+
+ int64_t timeUs = item.mTimestamp / 1000;
+ if (mFrameDropper != NULL && mFrameDropper->shouldDrop(timeUs)) {
+ ALOGV("skipping frame (%lld) to meet max framerate", static_cast<long long>(timeUs));
+ // set err to OK so that the skipped frame can still be saved as the latest frame
+ err = OK;
+ dropped = true;
+ } else {
+ err = submitBuffer_l(item, cbi);
+ }
}
if (err != OK) {
@@ -456,46 +467,46 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
} else {
ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
- setLatestSubmittedBuffer_l(item);
+ setLatestBuffer_l(item, dropped);
}
return true;
}
-bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
+bool GraphicBufferSource::repeatLatestBuffer_l() {
CHECK(mExecuting && mNumFramesAvailable == 0);
- if (mLatestSubmittedBufferId < 0 || mSuspended) {
+ if (mLatestBufferId < 0 || mSuspended) {
return false;
}
- if (mBufferSlot[mLatestSubmittedBufferId] == NULL) {
+ if (mBufferSlot[mLatestBufferId] == NULL) {
// This can happen if the remote side disconnects, causing
// onBuffersReleased() to NULL out our copy of the slots. The
// buffer is gone, so we have nothing to show.
//
// To be on the safe side we try to release the buffer.
- ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL");
+ ALOGD("repeatLatestBuffer_l: slot was NULL");
mConsumer->releaseBuffer(
- mLatestSubmittedBufferId,
- mLatestSubmittedBufferFrameNum,
+ mLatestBufferId,
+ mLatestBufferFrameNum,
EGL_NO_DISPLAY,
EGL_NO_SYNC_KHR,
Fence::NO_FENCE);
- mLatestSubmittedBufferId = -1;
- mLatestSubmittedBufferFrameNum = 0;
+ mLatestBufferId = -1;
+ mLatestBufferFrameNum = 0;
return false;
}
int cbi = findAvailableCodecBuffer_l();
if (cbi < 0) {
// No buffers available, bail.
- ALOGV("repeatLatestSubmittedBuffer_l: no codec buffers.");
+ ALOGV("repeatLatestBuffer_l: no codec buffers.");
return false;
}
BufferItem item;
- item.mBuf = mLatestSubmittedBufferId;
- item.mFrameNumber = mLatestSubmittedBufferFrameNum;
+ item.mBuf = mLatestBufferId;
+ item.mFrameNumber = mLatestBufferFrameNum;
item.mTimestamp = mRepeatLastFrameTimestamp;
status_t err = submitBuffer_l(item, cbi);
@@ -504,7 +515,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
return false;
}
- ++mLatestSubmittedBufferUseCount;
+ ++mLatestBufferUseCount;
/* repeat last frame up to kRepeatLastFrameCount times.
* in case of static scene, a single repeat might not get rid of encoder
@@ -514,7 +525,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
if (mReflector != NULL) {
- sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector);
msg->setInt32("generation", ++mRepeatLastFrameGeneration);
msg->post(mRepeatAfterUs);
}
@@ -523,31 +534,31 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
return true;
}
-void GraphicBufferSource::setLatestSubmittedBuffer_l(
- const BufferItem &item) {
- ALOGV("setLatestSubmittedBuffer_l");
+void GraphicBufferSource::setLatestBuffer_l(
+ const BufferItem &item, bool dropped) {
+ ALOGV("setLatestBuffer_l");
- if (mLatestSubmittedBufferId >= 0) {
- if (mLatestSubmittedBufferUseCount == 0) {
+ if (mLatestBufferId >= 0) {
+ if (mLatestBufferUseCount == 0) {
mConsumer->releaseBuffer(
- mLatestSubmittedBufferId,
- mLatestSubmittedBufferFrameNum,
+ mLatestBufferId,
+ mLatestBufferFrameNum,
EGL_NO_DISPLAY,
EGL_NO_SYNC_KHR,
Fence::NO_FENCE);
}
}
- mLatestSubmittedBufferId = item.mBuf;
- mLatestSubmittedBufferFrameNum = item.mFrameNumber;
+ mLatestBufferId = item.mBuf;
+ mLatestBufferFrameNum = item.mFrameNumber;
mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
- mLatestSubmittedBufferUseCount = 1;
+ mLatestBufferUseCount = dropped ? 0 : 1;
mRepeatBufferDeferred = false;
mRepeatLastFrameCount = kRepeatLastFrameCount;
if (mReflector != NULL) {
- sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector);
msg->setInt32("generation", ++mRepeatLastFrameGeneration);
msg->post(mRepeatAfterUs);
}
@@ -842,6 +853,23 @@ status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {
return OK;
}
+status_t GraphicBufferSource::setMaxFps(float maxFps) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting) {
+ return INVALID_OPERATION;
+ }
+
+ mFrameDropper = new FrameDropper();
+ status_t err = mFrameDropper->setMaxFrameRate(maxFps);
+ if (err != OK) {
+ mFrameDropper.clear();
+ return err;
+ }
+
+ return OK;
+}
+
void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {
Mutex::Autolock autoLock(mMutex);
@@ -880,12 +908,12 @@ void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- bool success = repeatLatestSubmittedBuffer_l();
+ bool success = repeatLatestBuffer_l();
if (success) {
- ALOGV("repeatLatestSubmittedBuffer_l SUCCESS");
+ ALOGV("repeatLatestBuffer_l SUCCESS");
} else {
- ALOGV("repeatLatestSubmittedBuffer_l FAILURE");
+ ALOGV("repeatLatestBuffer_l FAILURE");
mRepeatBufferDeferred = true;
}
break;
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 401bbc3..1067472 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -30,6 +30,8 @@
namespace android {
+class FrameDropper;
+
/*
* This class is used to feed OMX codecs from a Surface via BufferQueue.
*
@@ -119,6 +121,9 @@ public:
// of suspension on input.
status_t setMaxTimestampGapUs(int64_t maxGapUs);
+ // When set, the frame rate fed to the encoder will be capped at maxFps.
+ status_t setMaxFps(float maxFps);
+
// Sets the time lapse (or slow motion) parameters.
// data[0] is the time (us) between two frames for playback
// data[1] is the time (us) between two frames for capture
@@ -193,8 +198,8 @@ private:
// doing anything if we don't have a codec buffer available.
void submitEndOfInputStream_l();
- void setLatestSubmittedBuffer_l(const BufferItem &item);
- bool repeatLatestSubmittedBuffer_l();
+ void setLatestBuffer_l(const BufferItem &item, bool dropped);
+ bool repeatLatestBuffer_l();
int64_t getTimestamp(const BufferItem &item);
// Lock, covers all member variables.
@@ -250,6 +255,8 @@ private:
int64_t mPrevModifiedTimeUs;
int64_t mSkipFramesBeforeNs;
+ sp<FrameDropper> mFrameDropper;
+
sp<ALooper> mLooper;
sp<AHandlerReflector<GraphicBufferSource> > mReflector;
@@ -258,11 +265,11 @@ private:
int64_t mRepeatLastFrameTimestamp;
int32_t mRepeatLastFrameCount;
- int mLatestSubmittedBufferId;
- uint64_t mLatestSubmittedBufferFrameNum;
- int32_t mLatestSubmittedBufferUseCount;
+ int mLatestBufferId;
+ uint64_t mLatestBufferFrameNum;
+ int32_t mLatestBufferUseCount;
- // The previously submitted buffer should've been repeated but
+ // The previous buffer should've been repeated but
// no codec buffer was available at the time.
bool mRepeatBufferDeferred;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index ab7419f..4779d6a 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -1075,6 +1075,7 @@ inline static const char *asString(IOMX::InternalOptionType i, const char *def =
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
return "REPEAT_PREVIOUS_FRAME_DELAY";
case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP";
+ case IOMX::INTERNAL_OPTION_MAX_FPS: return "MAX_FPS";
case IOMX::INTERNAL_OPTION_START_TIME: return "START_TIME";
case IOMX::INTERNAL_OPTION_TIME_LAPSE: return "TIME_LAPSE";
default: return def;
@@ -1092,6 +1093,7 @@ status_t OMXNodeInstance::setInternalOption(
case IOMX::INTERNAL_OPTION_SUSPEND:
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP:
+ case IOMX::INTERNAL_OPTION_MAX_FPS:
case IOMX::INTERNAL_OPTION_START_TIME:
case IOMX::INTERNAL_OPTION_TIME_LAPSE:
{
@@ -1129,6 +1131,14 @@ status_t OMXNodeInstance::setInternalOption(
int64_t maxGapUs = *(int64_t *)data;
CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);
return bufferSource->setMaxTimestampGapUs(maxGapUs);
+ } else if (type == IOMX::INTERNAL_OPTION_MAX_FPS) {
+ if (size != sizeof(float)) {
+ return INVALID_OPERATION;
+ }
+
+ float maxFps = *(float *)data;
+ CLOG_CONFIG(setInternalOption, "maxFps=%f", maxFps);
+ return bufferSource->setMaxFps(maxFps);
} else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
if (size != sizeof(int64_t)) {
return INVALID_OPERATION;
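
For context, a sketch of how a client might hand the cap to the component (hypothetical; it assumes the existing IOMX::setInternalOption(node, portIndex, type, data, size) call and ACodec's kPortIndexInput constant — the real plumbing in ACodec is not shown in this hunk):

    // The option payload is a plain float, matching the sizeof(float) check above.
    float maxFps = 30.0f;
    status_t err = mOMX->setInternalOption(
            mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_MAX_FPS,
            &maxFps, sizeof(maxFps));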
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 7f99dcd..801a1bd 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -58,7 +58,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::sendCommand(
OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) {
CHECK(data == NULL);
- sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler->id());
+ sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler);
msg->setInt32("cmd", cmd);
msg->setInt32("param", param);
msg->post();
@@ -307,7 +307,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::freeBuffer(
OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(
OMX_BUFFERHEADERTYPE *buffer) {
- sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler->id());
+ sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler);
msg->setPointer("header", buffer);
msg->post();
@@ -316,7 +316,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(
OMX_ERRORTYPE SimpleSoftOMXComponent::fillThisBuffer(
OMX_BUFFERHEADERTYPE *buffer) {
- sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler->id());
+ sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler);
msg->setPointer("header", buffer);
msg->post();
diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk
index 447b29e..9be637a 100644
--- a/media/libstagefright/omx/tests/Android.mk
+++ b/media/libstagefright/omx/tests/Android.mk
@@ -20,3 +20,21 @@ LOCAL_MODULE_TAGS := tests
LOCAL_32_BIT_ONLY := true
include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := FrameDropper_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ FrameDropper_test.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright_omx \
+ libutils \
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/media/libstagefright/omx \
+
+include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/omx/tests/FrameDropper_test.cpp b/media/libstagefright/omx/tests/FrameDropper_test.cpp
new file mode 100644
index 0000000..4ac72c4
--- /dev/null
+++ b/media/libstagefright/omx/tests/FrameDropper_test.cpp
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameDropper_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include "FrameDropper.h"
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+struct TestFrame {
+ int64_t timeUs;
+ bool shouldDrop;
+};
+
+static const TestFrame testFrames20Fps[] = {
+ {1000000, false}, {1050000, false}, {1100000, false}, {1150000, false},
+ {1200000, false}, {1250000, false}, {1300000, false}, {1350000, false},
+ {1400000, false}, {1450000, false}, {1500000, false}, {1550000, false},
+ {1600000, false}, {1650000, false}, {1700000, false}, {1750000, false},
+ {1800000, false}, {1850000, false}, {1900000, false}, {1950000, false},
+};
+
+static const TestFrame testFrames30Fps[] = {
+ {1000000, false}, {1033333, false}, {1066667, false}, {1100000, false},
+ {1133333, false}, {1166667, false}, {1200000, false}, {1233333, false},
+ {1266667, false}, {1300000, false}, {1333333, false}, {1366667, false},
+ {1400000, false}, {1433333, false}, {1466667, false}, {1500000, false},
+ {1533333, false}, {1566667, false}, {1600000, false}, {1633333, false},
+};
+
+static const TestFrame testFrames40Fps[] = {
+ {1000000, false}, {1025000, true}, {1050000, false}, {1075000, false},
+ {1100000, false}, {1125000, true}, {1150000, false}, {1175000, false},
+ {1200000, false}, {1225000, true}, {1250000, false}, {1275000, false},
+ {1300000, false}, {1325000, true}, {1350000, false}, {1375000, false},
+ {1400000, false}, {1425000, true}, {1450000, false}, {1475000, false},
+};
+
+static const TestFrame testFrames60Fps[] = {
+ {1000000, false}, {1016667, true}, {1033333, false}, {1050000, true},
+ {1066667, false}, {1083333, true}, {1100000, false}, {1116667, true},
+ {1133333, false}, {1150000, true}, {1166667, false}, {1183333, true},
+ {1200000, false}, {1216667, true}, {1233333, false}, {1250000, true},
+ {1266667, false}, {1283333, true}, {1300000, false}, {1316667, true},
+};
+
+static const TestFrame testFramesVariableFps[] = {
+ // 40fps
+ {1000000, false}, {1025000, true}, {1050000, false}, {1075000, false},
+ {1100000, false}, {1125000, true}, {1150000, false}, {1175000, false},
+ {1200000, false}, {1225000, true}, {1250000, false}, {1275000, false},
+ {1300000, false}, {1325000, true}, {1350000, false}, {1375000, false},
+ {1400000, false}, {1425000, true}, {1450000, false}, {1475000, false},
+ // a timestamp jump plus switch to 20fps
+ {2000000, false}, {2050000, false}, {2100000, false}, {2150000, false},
+ {2200000, false}, {2250000, false}, {2300000, false}, {2350000, false},
+ {2400000, false}, {2450000, false}, {2500000, false}, {2550000, false},
+ {2600000, false}, {2650000, false}, {2700000, false}, {2750000, false},
+ {2800000, false}, {2850000, false}, {2900000, false}, {2950000, false},
+ // 60fps
+ {2966667, false}, {2983333, true}, {3000000, false}, {3016667, true},
+ {3033333, false}, {3050000, true}, {3066667, false}, {3083333, true},
+ {3100000, false}, {3116667, true}, {3133333, false}, {3150000, true},
+ {3166667, false}, {3183333, true}, {3200000, false}, {3216667, true},
+ {3233333, false}, {3250000, true}, {3266667, false}, {3283333, true},
+};
+
+static const int kMaxTestJitterUs = 2000;
+// return one of 1000, 0, -1000 as jitter.
+static int GetJitter(size_t i) {
+ return (1 - (i % 3)) * (kMaxTestJitterUs / 2);
+}
+
+class FrameDropperTest : public ::testing::Test {
+public:
+ FrameDropperTest() : mFrameDropper(new FrameDropper()) {
+ EXPECT_EQ(OK, mFrameDropper->setMaxFrameRate(30.0));
+ }
+
+protected:
+ void RunTest(const TestFrame* frames, size_t size) {
+ for (size_t i = 0; i < size; ++i) {
+ int jitter = GetJitter(i);
+ int64_t testTimeUs = frames[i].timeUs + jitter;
+ printf("time %lld, testTime %lld, jitter %d\n", frames[i].timeUs, testTimeUs, jitter);
+ EXPECT_EQ(frames[i].shouldDrop, mFrameDropper->shouldDrop(testTimeUs));
+ }
+ }
+
+ sp<FrameDropper> mFrameDropper;
+};
+
+TEST_F(FrameDropperTest, TestInvalidMaxFrameRate) {
+ EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(-1.0));
+ EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(0));
+}
+
+TEST_F(FrameDropperTest, Test20Fps) {
+ RunTest(testFrames20Fps, ARRAY_SIZE(testFrames20Fps));
+}
+
+TEST_F(FrameDropperTest, Test30Fps) {
+ RunTest(testFrames30Fps, ARRAY_SIZE(testFrames30Fps));
+}
+
+TEST_F(FrameDropperTest, Test40Fps) {
+ RunTest(testFrames40Fps, ARRAY_SIZE(testFrames40Fps));
+}
+
+TEST_F(FrameDropperTest, Test60Fps) {
+ RunTest(testFrames60Fps, ARRAY_SIZE(testFrames60Fps));
+}
+
+TEST_F(FrameDropperTest, TestVariableFps) {
+ RunTest(testFramesVariableFps, ARRAY_SIZE(testFramesVariableFps));
+}
+
+} // namespace android
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a6bd824..a86ab74 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -82,7 +82,7 @@ void ARTPConnection::addStream(
size_t index,
const sp<AMessage> &notify,
bool injected) {
- sp<AMessage> msg = new AMessage(kWhatAddStream, id());
+ sp<AMessage> msg = new AMessage(kWhatAddStream, this);
msg->setInt32("rtp-socket", rtpSocket);
msg->setInt32("rtcp-socket", rtcpSocket);
msg->setObject("session-desc", sessionDesc);
@@ -93,7 +93,7 @@ void ARTPConnection::addStream(
}
void ARTPConnection::removeStream(int rtpSocket, int rtcpSocket) {
- sp<AMessage> msg = new AMessage(kWhatRemoveStream, id());
+ sp<AMessage> msg = new AMessage(kWhatRemoveStream, this);
msg->setInt32("rtp-socket", rtpSocket);
msg->setInt32("rtcp-socket", rtcpSocket);
msg->post();
@@ -233,7 +233,7 @@ void ARTPConnection::postPollEvent() {
return;
}
- sp<AMessage> msg = new AMessage(kWhatPollStreams, id());
+ sp<AMessage> msg = new AMessage(kWhatPollStreams, this);
msg->post();
mPollEventPending = true;
@@ -639,7 +639,7 @@ sp<ARTPSource> ARTPConnection::findSource(StreamInfo *info, uint32_t srcId) {
}
void ARTPConnection::injectPacket(int index, const sp<ABuffer> &buffer) {
- sp<AMessage> msg = new AMessage(kWhatInjectPacket, id());
+ sp<AMessage> msg = new AMessage(kWhatInjectPacket, this);
msg->setInt32("index", index);
msg->setBuffer("buffer", buffer);
msg->post();
diff --git a/media/libstagefright/rtsp/ARTPSession.cpp b/media/libstagefright/rtsp/ARTPSession.cpp
index ba4e33c..e5acb06 100644
--- a/media/libstagefright/rtsp/ARTPSession.cpp
+++ b/media/libstagefright/rtsp/ARTPSession.cpp
@@ -82,7 +82,7 @@ status_t ARTPSession::setup(const sp<ASessionDescription> &desc) {
info->mRTPSocket = rtpSocket;
info->mRTCPSocket = rtcpSocket;
- sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, id());
+ sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, this);
notify->setSize("track-index", mTracks.size() - 1);
mRTPConn->addStream(
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index e1607bf..56c4aa6 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -146,7 +146,7 @@ status_t ARTPWriter::start(MetaData * /* params */) {
TRESPASS();
}
- (new AMessage(kWhatStart, mReflector->id()))->post();
+ (new AMessage(kWhatStart, mReflector))->post();
while (!(mFlags & kFlagStarted)) {
mCondition.wait(mLock);
@@ -161,7 +161,7 @@ status_t ARTPWriter::stop() {
return OK;
}
- (new AMessage(kWhatStop, mReflector->id()))->post();
+ (new AMessage(kWhatStop, mReflector))->post();
while (mFlags & kFlagStarted) {
mCondition.wait(mLock);
@@ -213,8 +213,8 @@ void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) {
mCondition.signal();
}
- (new AMessage(kWhatRead, mReflector->id()))->post();
- (new AMessage(kWhatSendSR, mReflector->id()))->post();
+ (new AMessage(kWhatRead, mReflector))->post();
+ (new AMessage(kWhatSendSR, mReflector))->post();
break;
}
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index 60b3aaf..855ffdc 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -68,28 +68,28 @@ ARTSPConnection::~ARTSPConnection() {
}
void ARTSPConnection::connect(const char *url, const sp<AMessage> &reply) {
- sp<AMessage> msg = new AMessage(kWhatConnect, id());
+ sp<AMessage> msg = new AMessage(kWhatConnect, this);
msg->setString("url", url);
msg->setMessage("reply", reply);
msg->post();
}
void ARTSPConnection::disconnect(const sp<AMessage> &reply) {
- sp<AMessage> msg = new AMessage(kWhatDisconnect, id());
+ sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
msg->setMessage("reply", reply);
msg->post();
}
void ARTSPConnection::sendRequest(
const char *request, const sp<AMessage> &reply) {
- sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+ sp<AMessage> msg = new AMessage(kWhatSendRequest, this);
msg->setString("request", request);
msg->setMessage("reply", reply);
msg->post();
}
void ARTSPConnection::observeBinaryData(const sp<AMessage> &reply) {
- sp<AMessage> msg = new AMessage(kWhatObserveBinaryData, id());
+ sp<AMessage> msg = new AMessage(kWhatObserveBinaryData, this);
msg->setMessage("reply", reply);
msg->post();
}
@@ -286,7 +286,7 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
if (err < 0) {
if (errno == EINPROGRESS) {
- sp<AMessage> msg = new AMessage(kWhatCompleteConnection, id());
+ sp<AMessage> msg = new AMessage(kWhatCompleteConnection, this);
msg->setMessage("reply", reply);
msg->setInt32("connection-id", mConnectionID);
msg->post();
@@ -523,7 +523,7 @@ void ARTSPConnection::postReceiveReponseEvent() {
return;
}
- sp<AMessage> msg = new AMessage(kWhatReceiveResponse, id());
+ sp<AMessage> msg = new AMessage(kWhatReceiveResponse, this);
msg->post();
mReceiveResponseEventPending = true;
@@ -746,7 +746,7 @@ bool ARTSPConnection::receiveRTSPReponse() {
AString request;
CHECK(reply->findString("original-request", &request));
- sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+ sp<AMessage> msg = new AMessage(kWhatSendRequest, this);
msg->setMessage("reply", reply);
msg->setString("request", request.c_str(), request.size());
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 3bf489b..0642343 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -169,10 +169,10 @@ struct MyHandler : public AHandler {
looper()->registerHandler(mConn);
(1 ? mNetLooper : looper())->registerHandler(mRTPConn);
- sp<AMessage> notify = new AMessage('biny', id());
+ sp<AMessage> notify = new AMessage('biny', this);
mConn->observeBinaryData(notify);
- sp<AMessage> reply = new AMessage('conn', id());
+ sp<AMessage> reply = new AMessage('conn', this);
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
@@ -180,10 +180,10 @@ struct MyHandler : public AHandler {
looper()->registerHandler(mConn);
(1 ? mNetLooper : looper())->registerHandler(mRTPConn);
- sp<AMessage> notify = new AMessage('biny', id());
+ sp<AMessage> notify = new AMessage('biny', this);
mConn->observeBinaryData(notify);
- sp<AMessage> reply = new AMessage('sdpl', id());
+ sp<AMessage> reply = new AMessage('sdpl', this);
reply->setObject("description", desc);
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
@@ -210,11 +210,11 @@ struct MyHandler : public AHandler {
}
void disconnect() {
- (new AMessage('abor', id()))->post();
+ (new AMessage('abor', this))->post();
}
void seek(int64_t timeUs) {
- sp<AMessage> msg = new AMessage('seek', id());
+ sp<AMessage> msg = new AMessage('seek', this);
msg->setInt64("time", timeUs);
mPauseGeneration++;
msg->post();
@@ -225,14 +225,14 @@ struct MyHandler : public AHandler {
}
void pause() {
- sp<AMessage> msg = new AMessage('paus', id());
+ sp<AMessage> msg = new AMessage('paus', this);
mPauseGeneration++;
msg->setInt32("pausecheck", mPauseGeneration);
msg->post(kPauseDelayUs);
}
void resume() {
- sp<AMessage> msg = new AMessage('resu', id());
+ sp<AMessage> msg = new AMessage('resu', this);
mPauseGeneration++;
msg->post();
}
@@ -454,10 +454,10 @@ struct MyHandler : public AHandler {
request.append("Accept: application/sdp\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('desc', id());
+ sp<AMessage> reply = new AMessage('desc', this);
mConn->sendRequest(request.c_str(), reply);
} else {
- (new AMessage('disc', id()))->post();
+ (new AMessage('disc', this))->post();
}
break;
}
@@ -468,10 +468,10 @@ struct MyHandler : public AHandler {
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
- sp<AMessage> reply = new AMessage('conn', id());
+ sp<AMessage> reply = new AMessage('conn', this);
mConn->connect(mOriginalSessionURL.c_str(), reply);
} else {
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
}
break;
}
@@ -514,7 +514,7 @@ struct MyHandler : public AHandler {
ALOGI("rewritten session url: '%s'", mSessionURL.c_str());
}
- sp<AMessage> reply = new AMessage('conn', id());
+ sp<AMessage> reply = new AMessage('conn', this);
mConn->connect(mOriginalSessionURL.c_str(), reply);
break;
}
@@ -586,7 +586,7 @@ struct MyHandler : public AHandler {
}
if (result != OK) {
- sp<AMessage> reply = new AMessage('disc', id());
+ sp<AMessage> reply = new AMessage('disc', this);
mConn->disconnect(reply);
}
break;
@@ -631,7 +631,7 @@ struct MyHandler : public AHandler {
}
if (result != OK) {
- sp<AMessage> reply = new AMessage('disc', id());
+ sp<AMessage> reply = new AMessage('disc', this);
mConn->disconnect(reply);
}
break;
@@ -703,7 +703,7 @@ struct MyHandler : public AHandler {
mSessionID.erase(i, mSessionID.size() - i);
}
- sp<AMessage> notify = new AMessage('accu', id());
+ sp<AMessage> notify = new AMessage('accu', this);
notify->setSize("track-index", trackIndex);
i = response->mHeaders.indexOfKey("transport");
@@ -769,10 +769,10 @@ struct MyHandler : public AHandler {
request.append("\r\n");
- sp<AMessage> reply = new AMessage('play', id());
+ sp<AMessage> reply = new AMessage('play', this);
mConn->sendRequest(request.c_str(), reply);
} else {
- sp<AMessage> reply = new AMessage('disc', id());
+ sp<AMessage> reply = new AMessage('disc', this);
mConn->disconnect(reply);
}
break;
@@ -797,7 +797,7 @@ struct MyHandler : public AHandler {
} else {
parsePlayResponse(response);
- sp<AMessage> timeout = new AMessage('tiou', id());
+ sp<AMessage> timeout = new AMessage('tiou', this);
mCheckTimeoutGeneration++;
timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
timeout->post(kStartupTimeoutUs);
@@ -805,7 +805,7 @@ struct MyHandler : public AHandler {
}
if (result != OK) {
- sp<AMessage> reply = new AMessage('disc', id());
+ sp<AMessage> reply = new AMessage('disc', this);
mConn->disconnect(reply);
}
@@ -831,7 +831,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('opts', id());
+ sp<AMessage> reply = new AMessage('opts', this);
reply->setInt32("generation", mKeepAliveGeneration);
mConn->sendRequest(request.c_str(), reply);
break;
@@ -894,7 +894,7 @@ struct MyHandler : public AHandler {
mPausing = false;
mSeekable = true;
- sp<AMessage> reply = new AMessage('tear', id());
+ sp<AMessage> reply = new AMessage('tear', this);
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
@@ -926,7 +926,7 @@ struct MyHandler : public AHandler {
ALOGI("TEARDOWN completed with result %d (%s)",
result, strerror(-result));
- sp<AMessage> reply = new AMessage('disc', id());
+ sp<AMessage> reply = new AMessage('disc', this);
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
@@ -958,7 +958,7 @@ struct MyHandler : public AHandler {
if (mNumAccessUnitsReceived == 0) {
#if 1
ALOGI("stream ended? aborting.");
- (new AMessage('abor', id()))->post();
+ (new AMessage('abor', this))->post();
break;
#else
ALOGI("haven't seen an AU in a looong time.");
@@ -1077,7 +1077,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
- sp<AMessage> reply = new AMessage('pau2', id());
+ sp<AMessage> reply = new AMessage('pau2', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -1114,7 +1114,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
- sp<AMessage> reply = new AMessage('res2', id());
+ sp<AMessage> reply = new AMessage('res2', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -1143,7 +1143,7 @@ struct MyHandler : public AHandler {
// Post new timeout in order to make sure to use
// fake timestamps if no new Sender Reports arrive
- sp<AMessage> timeout = new AMessage('tiou', id());
+ sp<AMessage> timeout = new AMessage('tiou', this);
mCheckTimeoutGeneration++;
timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
timeout->post(kStartupTimeoutUs);
@@ -1152,7 +1152,7 @@ struct MyHandler : public AHandler {
if (result != OK) {
ALOGE("resume failed, aborting.");
- (new AMessage('abor', id()))->post();
+ (new AMessage('abor', this))->post();
}
mPausing = false;
@@ -1180,7 +1180,7 @@ struct MyHandler : public AHandler {
mCheckPending = true;
++mCheckGeneration;
- sp<AMessage> reply = new AMessage('see1', id());
+ sp<AMessage> reply = new AMessage('see1', this);
reply->setInt64("time", timeUs);
if (mPausing) {
@@ -1221,7 +1221,7 @@ struct MyHandler : public AHandler {
// Start new timeoutgeneration to avoid getting timeout
// before PLAY response arrive
- sp<AMessage> timeout = new AMessage('tiou', id());
+ sp<AMessage> timeout = new AMessage('tiou', this);
mCheckTimeoutGeneration++;
timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
timeout->post(kStartupTimeoutUs);
@@ -1243,7 +1243,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
- sp<AMessage> reply = new AMessage('see2', id());
+ sp<AMessage> reply = new AMessage('see2', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -1277,7 +1277,7 @@ struct MyHandler : public AHandler {
// Post new timeout in order to make sure to use
// fake timestamps if no new Sender Reports arrive
- sp<AMessage> timeout = new AMessage('tiou', id());
+ sp<AMessage> timeout = new AMessage('tiou', this);
mCheckTimeoutGeneration++;
timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
timeout->post(kStartupTimeoutUs);
@@ -1293,7 +1293,7 @@ struct MyHandler : public AHandler {
if (result != OK) {
ALOGE("seek failed, aborting.");
- (new AMessage('abor', id()))->post();
+ (new AMessage('abor', this))->post();
}
mPausing = false;
@@ -1343,12 +1343,12 @@ struct MyHandler : public AHandler {
mTryTCPInterleaving = true;
- sp<AMessage> msg = new AMessage('abor', id());
+ sp<AMessage> msg = new AMessage('abor', this);
msg->setInt32("reconnect", true);
msg->post();
} else {
ALOGW("Never received any data, disconnecting.");
- (new AMessage('abor', id()))->post();
+ (new AMessage('abor', this))->post();
}
} else {
if (!mAllTracksHaveTime) {
@@ -1369,7 +1369,7 @@ struct MyHandler : public AHandler {
}
void postKeepAlive() {
- sp<AMessage> msg = new AMessage('aliv', id());
+ sp<AMessage> msg = new AMessage('aliv', this);
msg->setInt32("generation", mKeepAliveGeneration);
msg->post((mKeepAliveTimeoutUs * 9) / 10);
}
@@ -1380,7 +1380,7 @@ struct MyHandler : public AHandler {
}
mCheckPending = true;
- sp<AMessage> check = new AMessage('chek', id());
+ sp<AMessage> check = new AMessage('chek', this);
check->setInt32("generation", mCheckGeneration);
check->post(kAccessUnitTimeoutUs);
}
@@ -1566,7 +1566,7 @@ private:
if (source->initCheck() != OK) {
ALOGW("Unsupported format. Ignoring track #%d.", index);
- sp<AMessage> reply = new AMessage('setu', id());
+ sp<AMessage> reply = new AMessage('setu', this);
reply->setSize("index", index);
reply->setInt32("result", ERROR_UNSUPPORTED);
reply->post();
@@ -1652,7 +1652,7 @@ private:
request.append("\r\n");
- sp<AMessage> reply = new AMessage('setu', id());
+ sp<AMessage> reply = new AMessage('setu', this);
reply->setSize("index", index);
reply->setSize("track-index", mTracks.size() - 1);
mConn->sendRequest(request.c_str(), reply);
diff --git a/media/libstagefright/rtsp/MyTransmitter.h b/media/libstagefright/rtsp/MyTransmitter.h
index 009a3b1..369f276 100644
--- a/media/libstagefright/rtsp/MyTransmitter.h
+++ b/media/libstagefright/rtsp/MyTransmitter.h
@@ -100,7 +100,7 @@ struct MyTransmitter : public AHandler {
mLooper->registerHandler(this);
mLooper->registerHandler(mConn);
- sp<AMessage> reply = new AMessage('conn', id());
+ sp<AMessage> reply = new AMessage('conn', this);
mConn->connect(mServerURL.c_str(), reply);
#ifdef ANDROID
@@ -229,7 +229,7 @@ struct MyTransmitter : public AHandler {
request.append("\r\n");
request.append(sdp);
- sp<AMessage> reply = new AMessage('anno', id());
+ sp<AMessage> reply = new AMessage('anno', this);
mConn->sendRequest(request.c_str(), reply);
}
@@ -350,7 +350,7 @@ struct MyTransmitter : public AHandler {
<< result << " (" << strerror(-result) << ")";
if (result != OK) {
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
@@ -381,7 +381,7 @@ struct MyTransmitter : public AHandler {
if (response->mStatusCode == 401) {
if (mAuthType != NONE) {
LOG(INFO) << "FAILED to authenticate";
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
@@ -391,14 +391,14 @@ struct MyTransmitter : public AHandler {
}
if (result != OK || response->mStatusCode != 200) {
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
unsigned rtpPort;
ARTPConnection::MakePortPair(&mRTPSocket, &mRTCPSocket, &rtpPort);
- // (new AMessage('poll', id()))->post();
+ // (new AMessage('poll', this))->post();
AString request;
request.append("SETUP ");
@@ -414,7 +414,7 @@ struct MyTransmitter : public AHandler {
request.append(";mode=record\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('setu', id());
+ sp<AMessage> reply = new AMessage('setu', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -468,7 +468,7 @@ struct MyTransmitter : public AHandler {
}
if (result != OK || response->mStatusCode != 200) {
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
@@ -535,7 +535,7 @@ struct MyTransmitter : public AHandler {
request.append("\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('reco', id());
+ sp<AMessage> reply = new AMessage('reco', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -558,13 +558,13 @@ struct MyTransmitter : public AHandler {
}
if (result != OK) {
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
- (new AMessage('more', id()))->post();
- (new AMessage('sr ', id()))->post();
- (new AMessage('aliv', id()))->post(30000000ll);
+ (new AMessage('more', this))->post();
+ (new AMessage('sr ', this))->post();
+ (new AMessage('aliv', this))->post(30000000ll);
break;
}
@@ -586,7 +586,7 @@ struct MyTransmitter : public AHandler {
request.append("\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('opts', id());
+ sp<AMessage> reply = new AMessage('opts', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -603,7 +603,7 @@ struct MyTransmitter : public AHandler {
break;
}
- (new AMessage('aliv', id()))->post(30000000ll);
+ (new AMessage('aliv', this))->post(30000000ll);
break;
}
@@ -702,7 +702,7 @@ struct MyTransmitter : public AHandler {
request.append("\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('paus', id());
+ sp<AMessage> reply = new AMessage('paus', this);
mConn->sendRequest(request.c_str(), reply);
}
break;
@@ -753,7 +753,7 @@ struct MyTransmitter : public AHandler {
request.append("\r\n");
request.append("\r\n");
- sp<AMessage> reply = new AMessage('tear', id());
+ sp<AMessage> reply = new AMessage('tear', this);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -775,7 +775,7 @@ struct MyTransmitter : public AHandler {
CHECK(response != NULL);
}
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
@@ -784,14 +784,14 @@ struct MyTransmitter : public AHandler {
LOG(INFO) << "disconnect completed";
mConnected = false;
- (new AMessage('quit', id()))->post();
+ (new AMessage('quit', this))->post();
break;
}
case 'quit':
{
if (mConnected) {
- mConn->disconnect(new AMessage('disc', id()));
+ mConn->disconnect(new AMessage('disc', this));
break;
}
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
index a24eb69..0f46c83 100644
--- a/media/libstagefright/rtsp/SDPLoader.cpp
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -51,7 +51,7 @@ SDPLoader::SDPLoader(
void SDPLoader::load(const char *url, const KeyedVector<String8, String8> *headers) {
mNetLooper->registerHandler(this);
- sp<AMessage> msg = new AMessage(kWhatLoad, id());
+ sp<AMessage> msg = new AMessage(kWhatLoad, this);
msg->setString("url", url);
if (headers != NULL) {
diff --git a/media/libstagefright/rtsp/UDPPusher.cpp b/media/libstagefright/rtsp/UDPPusher.cpp
index 47ea6f1..5c685a1 100644
--- a/media/libstagefright/rtsp/UDPPusher.cpp
+++ b/media/libstagefright/rtsp/UDPPusher.cpp
@@ -65,7 +65,7 @@ void UDPPusher::start() {
mFirstTimeMs = fromlel(timeMs);
mFirstTimeUs = ALooper::GetNowUs();
- (new AMessage(kWhatPush, id()))->post();
+ (new AMessage(kWhatPush, this))->post();
}
bool UDPPusher::onPush() {
@@ -103,7 +103,7 @@ bool UDPPusher::onPush() {
timeMs -= mFirstTimeMs;
int64_t whenUs = mFirstTimeUs + timeMs * 1000ll;
int64_t nowUs = ALooper::GetNowUs();
- (new AMessage(kWhatPush, id()))->post(whenUs - nowUs);
+ (new AMessage(kWhatPush, this))->post(whenUs - nowUs);
return true;
}
diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk
index 8d6ff5b..51e1c78 100644
--- a/media/libstagefright/tests/Android.mk
+++ b/media/libstagefright/tests/Android.mk
@@ -62,6 +62,33 @@ LOCAL_C_INCLUDES := \
include $(BUILD_NATIVE_TEST)
+include $(CLEAR_VARS)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
+LOCAL_MODULE := MediaCodecListOverrides_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ MediaCodecListOverrides_test.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ libstagefright_omx \
+ libutils \
+ liblog
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/media/libstagefright \
+ frameworks/av/media/libstagefright/include \
+ frameworks/native/include/media/openmax \
+
+LOCAL_32_BIT_ONLY := true
+
+include $(BUILD_NATIVE_TEST)
+
# Include subdirectory makefiles
# ============================================================
diff --git a/media/libstagefright/tests/MediaCodecListOverrides_test.cpp b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
new file mode 100644
index 0000000..cacaa84
--- /dev/null
+++ b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
@@ -0,0 +1,316 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecListOverrides_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include "MediaCodecListOverrides.h"
+
+#include <media/MediaCodecInfo.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodecList.h>
+
+namespace android {
+
+static const char kTestOverridesStr[] =
+"<MediaCodecs>\n"
+" <Settings>\n"
+" <Setting name=\"max-max-supported-instances\" value=\"8\" update=\"true\" />\n"
+" </Settings>\n"
+" <Encoders>\n"
+" <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
+" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
+" <Limit name=\"bitrate\" range=\"1-20000000\" />\n"
+" <Feature name=\"can-swap-width-height\" />\n"
+" </MediaCodec>\n"
+" </Encoders>\n"
+" <Decoders>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.avc\" type=\"video/avc\" update=\"true\" >\n"
+" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
+" <Limit name=\"size\" min=\"64x64\" max=\"1920x1088\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"different_mime\" update=\"true\" >\n"
+" </MediaCodec>\n"
+" </Decoders>\n"
+"</MediaCodecs>\n";
+
+static const char kTestOverridesStrNew1[] =
+"<MediaCodecs>\n"
+" <Settings>\n"
+" <Setting name=\"max-max-supported-instances\" value=\"8\" update=\"true\" />\n"
+" </Settings>\n"
+" <Encoders>\n"
+" <MediaCodec name=\"OMX.qcom.video.encoder.avc\" type=\"video/avc\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
+" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
+" <Limit name=\"bitrate\" range=\"1-20000000\" />\n"
+" <Feature name=\"can-swap-width-height\" />\n"
+" </MediaCodec>\n"
+" </Encoders>\n"
+" <Decoders>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.h263\" type=\"video/3gpp\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.avc.secure\" type=\"video/avc\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"1\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.avc\" type=\"video/avc\" update=\"true\" >\n"
+" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
+" <Limit name=\"size\" min=\"64x64\" max=\"1920x1088\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"different_mime\" update=\"true\" >\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"video/mpeg2\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
+" </MediaCodec>\n"
+" </Decoders>\n"
+"</MediaCodecs>\n";
+
+static const char kTestOverridesStrNew2[] =
+"\n"
+"<MediaCodecs>\n"
+" <Encoders>\n"
+" <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.encoder.avc\" type=\"video/avc\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
+" </MediaCodec>\n"
+" </Encoders>\n"
+" <Decoders>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"video/mpeg2\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.h263\" type=\"video/3gpp\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
+" </MediaCodec>\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.avc.secure\" type=\"video/avc\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"1\" />\n"
+" </MediaCodec>\n"
+" </Decoders>\n"
+"</MediaCodecs>\n";
+
+class MediaCodecListOverridesTest : public ::testing::Test {
+public:
+ MediaCodecListOverridesTest() {}
+
+ void verifyOverrides(const KeyedVector<AString, CodecSettings> &overrides) {
+ EXPECT_EQ(3u, overrides.size());
+
+ EXPECT_TRUE(overrides.keyAt(0) == "OMX.qcom.video.decoder.avc video/avc decoder");
+ const CodecSettings &settings0 = overrides.valueAt(0);
+ EXPECT_EQ(1u, settings0.size());
+ EXPECT_TRUE(settings0.keyAt(0) == "max-supported-instances");
+ EXPECT_TRUE(settings0.valueAt(0) == "4");
+
+ EXPECT_TRUE(overrides.keyAt(1) == "OMX.qcom.video.encoder.avc video/avc encoder");
+ const CodecSettings &settings1 = overrides.valueAt(1);
+ EXPECT_EQ(1u, settings1.size());
+ EXPECT_TRUE(settings1.keyAt(0) == "max-supported-instances");
+ EXPECT_TRUE(settings1.valueAt(0) == "3");
+
+ EXPECT_TRUE(overrides.keyAt(2) == "global");
+ const CodecSettings &settings2 = overrides.valueAt(2);
+ EXPECT_EQ(3u, settings2.size());
+ EXPECT_TRUE(settings2.keyAt(0) == "max-max-supported-instances");
+ EXPECT_TRUE(settings2.valueAt(0) == "8");
+ EXPECT_TRUE(settings2.keyAt(1) == "supports-multiple-secure-codecs");
+ EXPECT_TRUE(settings2.valueAt(1) == "false");
+ EXPECT_TRUE(settings2.keyAt(2) == "supports-secure-with-non-secure-codec");
+ EXPECT_TRUE(settings2.valueAt(2) == "true");
+ }
+
+ void verifySetting(const sp<AMessage> &details, const char *name, const char *value) {
+ AString value1;
+ EXPECT_TRUE(details->findString(name, &value1));
+ EXPECT_TRUE(value1 == value);
+ }
+
+ void createTestInfos(Vector<sp<MediaCodecInfo>> *infos) {
+ const char *name = "OMX.qcom.video.decoder.avc";
+ const bool encoder = false;
+ const char *mime = "video/avc";
+ sp<MediaCodecInfo> info = new MediaCodecInfo(name, encoder, mime);
+ infos->push_back(info);
+ const sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor(mime);
+ const sp<AMessage> details = caps->getDetails();
+ details->setString("cap1", "value1");
+ details->setString("max-max-supported-instances", "16");
+
+ info = new MediaCodecInfo("anothercodec", true, "anothermime");
+ infos->push_back(info);
+ }
+
+ void addMaxInstancesSetting(
+ const AString &key,
+ const AString &value,
+ KeyedVector<AString, CodecSettings> *results) {
+ CodecSettings settings;
+ settings.add("max-supported-instances", value);
+ results->add(key, settings);
+ }
+
+ void exportTestResultsToXML(const char *fileName) {
+ KeyedVector<AString, CodecSettings> r;
+ addMaxInstancesSetting("OMX.qcom.video.decoder.avc.secure video/avc decoder", "1", &r);
+ addMaxInstancesSetting("OMX.qcom.video.decoder.h263 video/3gpp decoder", "4", &r);
+ addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg2 video/mpeg2 decoder", "3", &r);
+ addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg4 video/mp4v-es decoder", "3", &r);
+ addMaxInstancesSetting("OMX.qcom.video.encoder.avc video/avc encoder", "4", &r);
+ addMaxInstancesSetting("OMX.qcom.video.encoder.mpeg4 video/mp4v-es encoder", "4", &r);
+
+ exportResultsToXML(fileName, r);
+ }
+};
+
+TEST_F(MediaCodecListOverridesTest, splitString) {
+ AString s = "abc123";
+ AString delimiter = " ";
+ AString s1;
+ AString s2;
+ EXPECT_FALSE(splitString(s, delimiter, &s1, &s2));
+ s = "abc 123";
+ EXPECT_TRUE(splitString(s, delimiter, &s1, &s2));
+ EXPECT_TRUE(s1 == "abc");
+ EXPECT_TRUE(s2 == "123");
+
+ s = "abc123xyz";
+ delimiter = ",";
+ AString s3;
+ EXPECT_FALSE(splitString(s, delimiter, &s1, &s2, &s3));
+ s = "abc,123xyz";
+ EXPECT_FALSE(splitString(s, delimiter, &s1, &s2, &s3));
+ s = "abc,123,xyz";
+ EXPECT_TRUE(splitString(s, delimiter, &s1, &s2, &s3));
+ EXPECT_TRUE(s1 == "abc");
+ EXPECT_TRUE(s2 == "123" );
+ EXPECT_TRUE(s3 == "xyz");
+}
+
+// TODO: the codec component never returns OMX_EventCmdComplete in unit test.
+TEST_F(MediaCodecListOverridesTest, DISABLED_profileCodecs) {
+ sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ Vector<sp<MediaCodecInfo>> infos;
+ for (size_t i = 0; i < list->countCodecs(); ++i) {
+ infos.push_back(list->getCodecInfo(i));
+ }
+ KeyedVector<AString, CodecSettings> results;
+ profileCodecs(infos, &results, true /* forceToMeasure */);
+ EXPECT_LT(0u, results.size());
+ for (size_t i = 0; i < results.size(); ++i) {
+ AString key = results.keyAt(i);
+ CodecSettings settings = results.valueAt(i);
+ EXPECT_EQ(1u, settings.size());
+ EXPECT_TRUE(settings.keyAt(0) == "max-supported-instances");
+ AString valueS = settings.valueAt(0);
+ int32_t value = strtol(valueS.c_str(), NULL, 10);
+ EXPECT_LT(0, value);
+ ALOGV("profileCodecs results %s %s", key.c_str(), valueS.c_str());
+ }
+}
+
+TEST_F(MediaCodecListOverridesTest, applyCodecSettings) {
+ AString codecInfo = "OMX.qcom.video.decoder.avc video/avc decoder";
+ Vector<sp<MediaCodecInfo>> infos;
+ createTestInfos(&infos);
+ CodecSettings settings;
+ settings.add("max-supported-instances", "3");
+ settings.add("max-max-supported-instances", "8");
+ applyCodecSettings(codecInfo, settings, &infos);
+
+ EXPECT_EQ(2u, infos.size());
+ EXPECT_TRUE(AString(infos[0]->getCodecName()) == "OMX.qcom.video.decoder.avc");
+ const sp<AMessage> details = infos[0]->getCapabilitiesFor("video/avc")->getDetails();
+ verifySetting(details, "max-supported-instances", "3");
+ verifySetting(details, "max-max-supported-instances", "8");
+
+ EXPECT_TRUE(AString(infos[1]->getCodecName()) == "anothercodec");
+ EXPECT_EQ(0u, infos[1]->getCapabilitiesFor("anothermime")->getDetails()->countEntries());
+}
+
+TEST_F(MediaCodecListOverridesTest, exportResultsToExistingFile) {
+ const char *fileName = "/sdcard/mediacodec_list_overrides_test.xml";
+ remove(fileName);
+
+ FILE *f = fopen(fileName, "wb");
+ if (f == NULL) {
+ ALOGW("Failed to open %s for writing.", fileName);
+ return;
+ }
+ EXPECT_EQ(
+ strlen(kTestOverridesStr),
+ fwrite(kTestOverridesStr, 1, strlen(kTestOverridesStr), f));
+ fclose(f);
+
+ exportTestResultsToXML(fileName);
+
+ // verify
+ AString overrides;
+ f = fopen(fileName, "rb");
+ ASSERT_TRUE(f != NULL);
+ fseek(f, 0, SEEK_END);
+ long size = ftell(f);
+ rewind(f);
+
+ char *buf = (char *)malloc(size);
+ EXPECT_EQ(1, fread(buf, size, 1, f));
+ overrides.setTo(buf, size);
+ fclose(f);
+ free(buf);
+
+ EXPECT_TRUE(overrides == kTestOverridesStrNew1);
+
+ remove(fileName);
+}
+
+TEST_F(MediaCodecListOverridesTest, exportResultsToEmptyFile) {
+ const char *fileName = "/sdcard/mediacodec_list_overrides_test.xml";
+ remove(fileName);
+
+ exportTestResultsToXML(fileName);
+
+ // verify
+ AString overrides;
+ FILE *f = fopen(fileName, "rb");
+ ASSERT_TRUE(f != NULL);
+ fseek(f, 0, SEEK_END);
+ long size = ftell(f);
+ rewind(f);
+
+ char *buf = (char *)malloc(size);
+ EXPECT_EQ(1, fread(buf, size, 1, f));
+ overrides.setTo(buf, size);
+ fclose(f);
+ free(buf);
+
+ EXPECT_TRUE(overrides == kTestOverridesStrNew2);
+
+ remove(fileName);
+}
+
+} // namespace android
diff --git a/media/libstagefright/timedtext/TimedTextPlayer.cpp b/media/libstagefright/timedtext/TimedTextPlayer.cpp
index a070487..aecf666 100644
--- a/media/libstagefright/timedtext/TimedTextPlayer.cpp
+++ b/media/libstagefright/timedtext/TimedTextPlayer.cpp
@@ -56,25 +56,25 @@ TimedTextPlayer::~TimedTextPlayer() {
}
void TimedTextPlayer::start() {
- (new AMessage(kWhatStart, id()))->post();
+ (new AMessage(kWhatStart, this))->post();
}
void TimedTextPlayer::pause() {
- (new AMessage(kWhatPause, id()))->post();
+ (new AMessage(kWhatPause, this))->post();
}
void TimedTextPlayer::resume() {
- (new AMessage(kWhatResume, id()))->post();
+ (new AMessage(kWhatResume, this))->post();
}
void TimedTextPlayer::seekToAsync(int64_t timeUs) {
- sp<AMessage> msg = new AMessage(kWhatSeek, id());
+ sp<AMessage> msg = new AMessage(kWhatSeek, this);
msg->setInt64("seekTimeUs", timeUs);
msg->post();
}
void TimedTextPlayer::setDataSource(sp<TimedTextSource> source) {
- sp<AMessage> msg = new AMessage(kWhatSetSource, id());
+ sp<AMessage> msg = new AMessage(kWhatSetSource, this);
msg->setObject("source", source);
msg->post();
}
@@ -231,7 +231,7 @@ void TimedTextPlayer::doRead(MediaSource::ReadOptions* options) {
status_t err = mSource->read(&startTimeUs, &endTimeUs,
&(parcelEvent->parcel), options);
if (err == WOULD_BLOCK) {
- sp<AMessage> msg = new AMessage(kWhatRetryRead, id());
+ sp<AMessage> msg = new AMessage(kWhatRetryRead, this);
if (options != NULL) {
int64_t seekTimeUs = kInvalidTimeUs;
MediaSource::ReadOptions::SeekMode seekMode =
@@ -259,7 +259,7 @@ void TimedTextPlayer::doRead(MediaSource::ReadOptions* options) {
void TimedTextPlayer::postTextEvent(const sp<ParcelEvent>& parcel, int64_t timeUs) {
int64_t delayUs = delayUsFromCurrentTime(timeUs);
- sp<AMessage> msg = new AMessage(kWhatSendSubtitle, id());
+ sp<AMessage> msg = new AMessage(kWhatSendSubtitle, this);
msg->setInt32("generation", mSendSubtitleGeneration);
if (parcel != NULL) {
msg->setObject("subtitle", parcel);
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 069961b..737f144 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -80,38 +80,6 @@ WebmWriter::WebmWriter(int fd)
mCuePoints);
}
-WebmWriter::WebmWriter(const char *filename)
- : mInitCheck(NO_INIT),
- mTimeCodeScale(1000000),
- mStartTimestampUs(0),
- mStartTimeOffsetMs(0),
- mSegmentOffset(0),
- mSegmentDataStart(0),
- mInfoOffset(0),
- mInfoSize(0),
- mTracksOffset(0),
- mCuesOffset(0),
- mPaused(false),
- mStarted(false),
- mIsFileSizeLimitExplicitlyRequested(false),
- mIsRealTimeRecording(false),
- mStreamableFile(true),
- mEstimatedCuesSize(0) {
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0));
- mInitCheck = OK;
- }
- mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
- mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
- mSinkThread = new WebmFrameSinkThread(
- mFd,
- mSegmentDataStart,
- mStreams[kVideoIndex].mSink,
- mStreams[kAudioIndex].mSink,
- mCuePoints);
-}
-
// static
sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
int32_t width, height;
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
index 36b6965..4ad770e 100644
--- a/media/libstagefright/webm/WebmWriter.h
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -37,7 +37,6 @@ namespace android {
class WebmWriter : public MediaWriter {
public:
WebmWriter(int fd);
- WebmWriter(const char *filename);
~WebmWriter() { reset(); }
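
With the filename constructor removed, WebmWriter is constructed only from an already-open file descriptor, so callers open the output file themselves. A caller-side sketch reusing the same open flags the removed constructor used; the wrapper function, output path, and fd-ownership convention are assumptions, not code from this tree:

    #include <fcntl.h>
    #include <sys/stat.h>

    #include <media/stagefright/MediaWriter.h>
    #include "WebmWriter.h"   // media/libstagefright/webm/WebmWriter.h

    using namespace android;

    // Open the output file with the flags the removed WebmWriter(const char *)
    // used, then hand the fd to the remaining WebmWriter(int fd) constructor.
    static sp<MediaWriter> makeWebmWriter(const char *path) {
        int fd = open(path, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
                      S_IRUSR | S_IWUSR);
        if (fd < 0) {
            return NULL;
        }
        return new WebmWriter(fd);
    }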
diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp
index b1cdec0..6f0087f 100644
--- a/media/libstagefright/wifi-display/MediaSender.cpp
+++ b/media/libstagefright/wifi-display/MediaSender.cpp
@@ -121,7 +121,7 @@ status_t MediaSender::initAsync(
}
if (err == OK) {
- sp<AMessage> notify = new AMessage(kWhatSenderNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
notify->setInt32("generation", mGeneration);
mTSSender = new RTPSender(mNetSession, notify);
looper()->registerHandler(mTSSender);
@@ -170,7 +170,7 @@ status_t MediaSender::initAsync(
return INVALID_OPERATION;
}
- sp<AMessage> notify = new AMessage(kWhatSenderNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
notify->setInt32("generation", mGeneration);
notify->setSize("trackIndex", trackIndex);
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
index e88a3bd..4e72533 100644
--- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
@@ -95,11 +95,11 @@ status_t RTPSender::initAsync(
return INVALID_OPERATION;
}
- sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
+ sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, this);
sp<AMessage> rtcpNotify;
if (remoteRTCPPort >= 0) {
- rtcpNotify = new AMessage(kWhatRTCPNotify, id());
+ rtcpNotify = new AMessage(kWhatRTCPNotify, this);
}
CHECK_EQ(mRTPSessionID, 0);
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 2834a66..8368945 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -93,7 +93,7 @@ Converter::~Converter() {
void Converter::shutdownAsync() {
ALOGV("shutdown");
- (new AMessage(kWhatShutdown, id()))->post();
+ (new AMessage(kWhatShutdown, this))->post();
}
status_t Converter::init() {
@@ -482,11 +482,11 @@ void Converter::scheduleDoMoreWork() {
#if 1
if (mEncoderActivityNotify == NULL) {
- mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, id());
+ mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this);
}
mEncoder->requestActivityNotification(mEncoderActivityNotify->dup());
#else
- sp<AMessage> notify = new AMessage(kWhatEncoderActivity, id());
+ sp<AMessage> notify = new AMessage(kWhatEncoderActivity, this);
notify->setInt64("whenUs", ALooper::GetNowUs());
mEncoder->requestActivityNotification(notify);
#endif
@@ -731,8 +731,7 @@ status_t Converter::doMoreWork() {
// MediaSender will post the following message when HDCP
// is done, to release the output buffer back to encoder.
- sp<AMessage> notify(new AMessage(
- kWhatReleaseOutputBuffer, id()));
+ sp<AMessage> notify(new AMessage(kWhatReleaseOutputBuffer, this));
notify->setInt32("bufferIndex", bufferIndex);
buffer = new ABuffer(
@@ -787,18 +786,18 @@ status_t Converter::doMoreWork() {
}
void Converter::requestIDRFrame() {
- (new AMessage(kWhatRequestIDRFrame, id()))->post();
+ (new AMessage(kWhatRequestIDRFrame, this))->post();
}
void Converter::dropAFrame() {
// Unsupported in surface input mode.
CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT));
- (new AMessage(kWhatDropAFrame, id()))->post();
+ (new AMessage(kWhatDropAFrame, this))->post();
}
void Converter::suspendEncoding(bool suspend) {
- sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, id());
+ sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, this);
msg->setInt32("suspend", suspend);
msg->post();
}
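
The Converter hunks follow the same handler-target migration; the detail worth noting is mEncoderActivityNotify, a template message targeted at the Converter itself that is dup()'ed and handed to MediaCodec::requestActivityNotification() each time more work is scheduled, so the codec pings the handler when buffers become available. A reduced sketch of that loop; EncoderDriver and its members are illustrative stand-ins for the Converter fields used above, and the codec is assumed to be configured and started elsewhere:

    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/MediaCodec.h>

    using namespace android;

    struct EncoderDriver : public AHandler {
        enum { kWhatEncoderActivity = 'enca' };

        sp<MediaCodec> mEncoder;              // configured and started elsewhere
        sp<AMessage> mEncoderActivityNotify;  // template message, reused

        void scheduleDoMoreWork() {
            if (mEncoderActivityNotify == NULL) {
                // Template message targeted at this handler.
                mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this);
            }
            // dup() gives MediaCodec its own copy to post when an input or
            // output buffer becomes available.
            mEncoder->requestActivityNotification(mEncoderActivityNotify->dup());
        }

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            if (msg->what() == kWhatEncoderActivity) {
                // ... drain/fill codec buffers here, then re-arm:
                scheduleDoMoreWork();
            }
        }
    };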
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp
index 86b918f..ce07a4e 100644
--- a/media/libstagefright/wifi-display/source/MediaPuller.cpp
+++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp
@@ -63,21 +63,21 @@ status_t MediaPuller::postSynchronouslyAndReturnError(
}
status_t MediaPuller::start() {
- return postSynchronouslyAndReturnError(new AMessage(kWhatStart, id()));
+ return postSynchronouslyAndReturnError(new AMessage(kWhatStart, this));
}
void MediaPuller::stopAsync(const sp<AMessage> &notify) {
- sp<AMessage> msg = new AMessage(kWhatStop, id());
+ sp<AMessage> msg = new AMessage(kWhatStop, this);
msg->setMessage("notify", notify);
msg->post();
}
void MediaPuller::pause() {
- (new AMessage(kWhatPause, id()))->post();
+ (new AMessage(kWhatPause, this))->post();
}
void MediaPuller::resume() {
- (new AMessage(kWhatResume, id()))->post();
+ (new AMessage(kWhatResume, this))->post();
}
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
@@ -105,7 +105,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
response->postReply(replyID);
break;
@@ -215,7 +215,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
}
void MediaPuller::schedulePull() {
- sp<AMessage> msg = new AMessage(kWhatPull, id());
+ sp<AMessage> msg = new AMessage(kWhatPull, this);
msg->setInt32("generation", mPullGeneration);
msg->post();
}
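
MediaPuller::start() still goes through postSynchronouslyAndReturnError(), but the reply bookkeeping changes type: the handler now receives an opaque sp<AReplyToken> from senderAwaitsResponse() instead of a uint32_t id, and answers through that token. A minimal sketch of both halves of the request/reply round trip; the Worker class and message name are illustrative, while postAndAwaitResponse, senderAwaitsResponse, and postReply are the foundation APIs used above. The handler must be registered with a started ALooper for the round trip to complete:

    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/Errors.h>

    using namespace android;

    struct Worker : public AHandler {
        enum { kWhatStart = 'strt' };

        // Caller side: post the message and block until the handler replies.
        status_t start() {
            sp<AMessage> msg = new AMessage(kWhatStart, this);
            sp<AMessage> response;
            status_t err = msg->postAndAwaitResponse(&response);
            if (err != OK) {
                return err;
            }
            if (!response->findInt32("err", &err)) {
                err = OK;
            }
            return err;
        }

    protected:
        // Handler side: fetch the reply token and post the response through it.
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            if (msg->what() == kWhatStart) {
                status_t err = OK;  // ... do the actual work ...

                sp<AMessage> response = new AMessage;
                response->setInt32("err", err);

                sp<AReplyToken> replyID;
                CHECK(msg->senderAwaitsResponse(&replyID));
                response->postReply(replyID);
            }
        }
    };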
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 2cb4786..6080943 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -214,7 +214,7 @@ void WifiDisplaySource::PlaybackSession::Track::stopAsync() {
mConverter->shutdownAsync();
}
- sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, id());
+ sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, this);
if (mStarted && mMediaPuller != NULL) {
if (mRepeaterSource != NULL) {
@@ -382,7 +382,7 @@ status_t WifiDisplaySource::PlaybackSession::init(
size_t videoResolutionIndex,
VideoFormats::ProfileType videoProfileType,
VideoFormats::LevelType videoLevelType) {
- sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, this);
mMediaSender = new MediaSender(mNetSession, notify);
looper()->registerHandler(mMediaSender);
@@ -440,7 +440,7 @@ void WifiDisplaySource::PlaybackSession::updateLiveness() {
status_t WifiDisplaySource::PlaybackSession::play() {
updateLiveness();
- (new AMessage(kWhatResume, id()))->post();
+ (new AMessage(kWhatResume, this))->post();
return OK;
}
@@ -460,7 +460,7 @@ status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() {
status_t WifiDisplaySource::PlaybackSession::pause() {
updateLiveness();
- (new AMessage(kWhatPause, id()))->post();
+ (new AMessage(kWhatPause, this))->post();
return OK;
}
@@ -786,7 +786,7 @@ status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
size_t trackIndex = mTracks.size();
- sp<AMessage> notify = new AMessage(kWhatTrackNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatTrackNotify, this);
notify->setSize("trackIndex", trackIndex);
sp<Track> track = new Track(notify, format);
@@ -833,7 +833,7 @@ void WifiDisplaySource::PlaybackSession::schedulePullExtractor() {
int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs;
- sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, id());
+ sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, this);
msg->setInt32("generation", mPullExtractorGeneration);
msg->post(whenUs - nowUs);
@@ -857,7 +857,7 @@ void WifiDisplaySource::PlaybackSession::onPullExtractor() {
size_t trackIndex;
CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex));
- sp<AMessage> msg = new AMessage(kWhatConverterNotify, id());
+ sp<AMessage> msg = new AMessage(kWhatConverterNotify, this);
msg->setSize(
"trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex));
@@ -955,7 +955,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
? MEDIA_MIMETYPE_AUDIO_RAW : MEDIA_MIMETYPE_AUDIO_AAC);
}
- notify = new AMessage(kWhatConverterNotify, id());
+ notify = new AMessage(kWhatConverterNotify, this);
notify->setSize("trackIndex", trackIndex);
sp<Converter> converter = new Converter(notify, codecLooper, format);
@@ -970,7 +970,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
return err;
}
- notify = new AMessage(Converter::kWhatMediaPullerNotify, converter->id());
+ notify = new AMessage(Converter::kWhatMediaPullerNotify, converter);
notify->setSize("trackIndex", trackIndex);
sp<MediaPuller> puller = new MediaPuller(source, notify);
@@ -980,7 +980,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
*numInputBuffers = converter->getInputBufferCount();
}
- notify = new AMessage(kWhatTrackNotify, id());
+ notify = new AMessage(kWhatTrackNotify, this);
notify->setSize("trackIndex", trackIndex);
sp<Track> track = new Track(
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
index 59d7e6e..af6b663 100644
--- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
@@ -173,7 +173,7 @@ status_t RepeaterSource::read(
}
void RepeaterSource::postRead() {
- (new AMessage(kWhatRead, mReflector->id()))->post();
+ (new AMessage(kWhatRead, mReflector))->post();
}
void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) {
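
RepeaterSource is not an AHandler itself; it owns an AHandlerReflector, and the change above passes the reflector object (rather than its id) as the message target. A sketch of the reflector pattern, assuming the host class builds the reflector around itself and registers it with a looper; the Repeater class and kWhatRead value are illustrative:

    #include <media/stagefright/foundation/AHandlerReflector.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/RefBase.h>

    using namespace android;

    // A class that wants looper-delivered messages without deriving from AHandler.
    struct Repeater : public RefBase {
        enum { kWhatRead = 'read' };

        Repeater(const sp<ALooper> &looper) {
            // The reflector is the actual AHandler; it forwards every message
            // to Repeater::onMessageReceived().
            mReflector = new AHandlerReflector<Repeater>(this);
            looper->registerHandler(mReflector);
        }

        void postRead() {
            // Target the reflector object directly (previously mReflector->id()).
            (new AMessage(kWhatRead, mReflector))->post();
        }

        void onMessageReceived(const sp<AMessage> &msg) {
            if (msg->what() == kWhatRead) {
                // ... perform the read ...
            }
        }

    private:
        sp<AHandlerReflector<Repeater> > mReflector;
    };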
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 7eb8b73..14d0951 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -57,7 +57,7 @@ WifiDisplaySource::WifiDisplaySource(
mNetSession(netSession),
mClient(client),
mSessionID(0),
- mStopReplyID(0),
+ mStopReplyID(NULL),
mChosenRTPPort(-1),
mUsingPCMAudio(false),
mClientSessionID(0),
@@ -106,7 +106,7 @@ static status_t PostAndAwaitResponse(
status_t WifiDisplaySource::start(const char *iface) {
CHECK_EQ(mState, INITIALIZED);
- sp<AMessage> msg = new AMessage(kWhatStart, id());
+ sp<AMessage> msg = new AMessage(kWhatStart, this);
msg->setString("iface", iface);
sp<AMessage> response;
@@ -114,21 +114,21 @@ status_t WifiDisplaySource::start(const char *iface) {
}
status_t WifiDisplaySource::stop() {
- sp<AMessage> msg = new AMessage(kWhatStop, id());
+ sp<AMessage> msg = new AMessage(kWhatStop, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
status_t WifiDisplaySource::pause() {
- sp<AMessage> msg = new AMessage(kWhatPause, id());
+ sp<AMessage> msg = new AMessage(kWhatPause, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
status_t WifiDisplaySource::resume() {
- sp<AMessage> msg = new AMessage(kWhatResume, id());
+ sp<AMessage> msg = new AMessage(kWhatResume, this);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
@@ -138,7 +138,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatStart:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
AString iface;
@@ -167,7 +167,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
if (err == OK) {
if (inet_aton(iface.c_str(), &mInterfaceAddr) != 0) {
- sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatRTSPNotify, this);
err = mNetSession->createRTSPServer(
mInterfaceAddr, port, notify, &mSessionID);
@@ -310,7 +310,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
if (err == OK) {
mState = AWAITING_CLIENT_TEARDOWN;
- (new AMessage(kWhatTeardownTriggerTimedOut, id()))->post(
+ (new AMessage(kWhatTeardownTriggerTimedOut, this))->post(
kTeardownTriggerTimeouSecs * 1000000ll);
break;
@@ -325,7 +325,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
case kWhatPause:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
status_t err = OK;
@@ -345,7 +345,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
case kWhatResume:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
status_t err = OK;
@@ -492,7 +492,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
if (mState == AWAITING_CLIENT_TEARDOWN) {
ALOGI("TEARDOWN trigger timed out, forcing disconnection.");
- CHECK_NE(mStopReplyID, 0);
+ CHECK(mStopReplyID != NULL);
finishStop();
break;
}
@@ -529,7 +529,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
// HDCPObserver::notify is completely handled before
// we clear the HDCP instance and unload the shared
// library :(
- (new AMessage(kWhatFinishStop2, id()))->post(300000ll);
+ (new AMessage(kWhatFinishStop2, this))->post(300000ll);
break;
}
@@ -1027,7 +1027,7 @@ void WifiDisplaySource::scheduleReaper() {
}
mReaperPending = true;
- (new AMessage(kWhatReapDeadClients, id()))->post(kReaperIntervalUs);
+ (new AMessage(kWhatReapDeadClients, this))->post(kReaperIntervalUs);
}
void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) {
@@ -1035,7 +1035,7 @@ void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) {
// expire, make sure the timeout is greater than 5 secs to begin with.
CHECK_GT(kPlaybackSessionTimeoutUs, 5000000ll);
- sp<AMessage> msg = new AMessage(kWhatKeepAlive, id());
+ sp<AMessage> msg = new AMessage(kWhatKeepAlive, this);
msg->setInt32("sessionID", sessionID);
msg->post(kPlaybackSessionTimeoutUs - 5000000ll);
}
@@ -1239,7 +1239,7 @@ status_t WifiDisplaySource::onSetupRequest(
int32_t playbackSessionID = makeUniquePlaybackSessionID();
- sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, this);
notify->setInt32("playbackSessionID", playbackSessionID);
notify->setInt32("sessionID", sessionID);
@@ -1470,7 +1470,7 @@ status_t WifiDisplaySource::onTeardownRequest(
mNetSession->sendRequest(sessionID, response.c_str());
if (mState == AWAITING_CLIENT_TEARDOWN) {
- CHECK_NE(mStopReplyID, 0);
+ CHECK(mStopReplyID != NULL);
finishStop();
} else {
mClient->onDisplayError(IRemoteDisplayClient::kDisplayErrorUnknown);
@@ -1707,7 +1707,7 @@ status_t WifiDisplaySource::makeHDCP() {
return ERROR_UNSUPPORTED;
}
- sp<AMessage> notify = new AMessage(kWhatHDCPNotify, id());
+ sp<AMessage> notify = new AMessage(kWhatHDCPNotify, this);
mHDCPObserver = new HDCPObserver(notify);
status_t err = mHDCP->setObserver(mHDCPObserver);
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 750265f..0f779e4 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -27,6 +27,7 @@
namespace android {
+struct AReplyToken;
struct IHDCP;
struct IRemoteDisplayClient;
struct ParsedMessage;
@@ -121,7 +122,7 @@ private:
struct in_addr mInterfaceAddr;
int32_t mSessionID;
- uint32_t mStopReplyID;
+ sp<AReplyToken> mStopReplyID;
AString mWfdClientRtpPorts;
int32_t mChosenRTPPort; // extracted from "wfd_client_rtp_ports"
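
The header change makes the deferred-reply pattern explicit: stop() blocks in PostAndAwaitResponse(), but the handler cannot answer immediately because it may first need the client's TEARDOWN (or the timeout posted above). The reply token is therefore stashed in mStopReplyID and only consumed once shutdown actually finishes, which is why the sanity check becomes a non-NULL test on the sp<AReplyToken>. A condensed sketch of stashing and later consuming a reply token inside a handler like this one; the function bodies and the response payload are illustrative, not the exact code in this file:

    // Inside an AHandler such as WifiDisplaySource; member as declared above:
    //     sp<AReplyToken> mStopReplyID;

    void onStopRequested(const sp<AMessage> &msg) {
        // Don't reply yet: remember who is waiting.
        CHECK(msg->senderAwaitsResponse(&mStopReplyID));
        // ... kick off asynchronous teardown (e.g. wait for client TEARDOWN,
        //     or for the kWhatTeardownTriggerTimedOut message) ...
    }

    void finishStop() {
        // Teardown is done; now unblock the caller of stop().
        CHECK(mStopReplyID != NULL);

        sp<AMessage> response = new AMessage;
        response->setInt32("err", OK);
        response->postReply(mStopReplyID);

        mStopReplyID.clear();
    }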
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
index bb3e2fd..c098135 100644
--- a/media/libstagefright/yuv/YUVImage.cpp
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -374,13 +374,13 @@ uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) {
void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
uint8_t *r, uint8_t *g, uint8_t *b) const {
- *r = yValue + (1.370705 * (vValue-128));
- *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
- *b = yValue + (1.732446 * (uValue-128));
+ int rTmp = yValue + (1.370705 * (vValue-128));
+ int gTmp = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
+ int bTmp = yValue + (1.732446 * (uValue-128));
- *r = clamp(*r, 0, 255);
- *g = clamp(*g, 0, 255);
- *b = clamp(*b, 0, 255);
+ *r = clamp(rTmp, 0, 255);
+ *g = clamp(gTmp, 0, 255);
+ *b = clamp(bTmp, 0, 255);
}
bool YUVImage::writeToPPM(const char *filename) const {
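
The yuv2rgb change fixes a wrap-around bug: the converted component was written straight into the uint8_t output before clamping, so any out-of-range value had already been narrowed (typically wrapping modulo 256, e.g. 300 stored through a uint8_t pointer reads back as 44) and the subsequent clamp did nothing. Computing into an int temporary keeps the out-of-range value intact until it is clamped. A standalone sketch of the before/after behaviour; clampToByte is an illustrative helper, not the clamp() defined in this file:

    #include <stdint.h>
    #include <stdio.h>

    // Illustrative helper: clamp a full-range int into 0..255.
    static uint8_t clampToByte(int v) {
        if (v < 0) return 0;
        if (v > 255) return 255;
        return (uint8_t)v;
    }

    int main() {
        uint8_t y = 235, v = 240;                        // a bright, red-heavy sample
        int computed = y + (int)(1.370705 * (v - 128));  // 388, outside 0..255

        uint8_t broken = (uint8_t)computed;    // old path: wraps to 132 first,
        broken = clampToByte(broken);          // so the clamp is a no-op

        uint8_t fixed = clampToByte(computed); // new path: 388 clamps to 255

        printf("computed=%d broken=%u fixed=%u\n", computed, broken, fixed);
        return 0;
    }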