diff options
Diffstat (limited to 'media/libmediaplayerservice')
16 files changed, 907 insertions, 71 deletions
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index adc066d..2cf5710 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -22,6 +22,7 @@ LOCAL_SRC_FILES:= \ StagefrightPlayer.cpp \ StagefrightRecorder.cpp \ TestPlayerStub.cpp \ + VideoFrameScheduler.cpp \ LOCAL_SHARED_LIBRARIES := \ libbinder \ diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index c8cb7ed..8eb1269 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -43,6 +43,7 @@ #include <utils/Errors.h> // for status_t #include <utils/String8.h> #include <utils/SystemClock.h> +#include <utils/Timers.h> #include <utils/Vector.h> #include <media/IMediaHTTPService.h> @@ -1496,6 +1497,12 @@ status_t MediaPlayerService::AudioOutput::getPosition(uint32_t *position) const return mTrack->getPosition(position); } +status_t MediaPlayerService::AudioOutput::getTimestamp(AudioTimestamp &ts) const +{ + if (mTrack == 0) return NO_INIT; + return mTrack->getTimestamp(ts); +} + status_t MediaPlayerService::AudioOutput::getFramesWritten(uint32_t *frameswritten) const { if (mTrack == 0) return NO_INIT; @@ -1971,6 +1978,15 @@ status_t MediaPlayerService::AudioCache::getPosition(uint32_t *position) const return NO_ERROR; } +status_t MediaPlayerService::AudioCache::getTimestamp(AudioTimestamp &ts) const +{ + ts.mPosition = mSize / mFrameSize; + nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC); + ts.mTime.tv_sec = now / 1000000000LL; + ts.mTime.tv_nsec = now - (1000000000LL * ts.mTime.tv_sec); + return NO_ERROR; +} + status_t MediaPlayerService::AudioCache::getFramesWritten(uint32_t *written) const { if (written == 0) return BAD_VALUE; diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index 4fe7075..3b96e88 100644 --- 
a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -85,6 +85,7 @@ class MediaPlayerService : public BnMediaPlayerService virtual uint32_t latency() const; virtual float msecsPerFrame() const; virtual status_t getPosition(uint32_t *position) const; + virtual status_t getTimestamp(AudioTimestamp &ts) const; virtual status_t getFramesWritten(uint32_t *frameswritten) const; virtual int getSessionId() const; virtual uint32_t getSampleRate() const; @@ -198,6 +199,7 @@ class MediaPlayerService : public BnMediaPlayerService virtual uint32_t latency() const; virtual float msecsPerFrame() const; virtual status_t getPosition(uint32_t *position) const; + virtual status_t getTimestamp(AudioTimestamp &ts) const; virtual status_t getFramesWritten(uint32_t *frameswritten) const; virtual int getSessionId() const; virtual uint32_t getSampleRate() const; diff --git a/media/libmediaplayerservice/VideoFrameScheduler.cpp b/media/libmediaplayerservice/VideoFrameScheduler.cpp new file mode 100644 index 0000000..4251c4e --- /dev/null +++ b/media/libmediaplayerservice/VideoFrameScheduler.cpp @@ -0,0 +1,499 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VideoFrameScheduler" +#include <utils/Log.h> +#define ATRACE_TAG ATRACE_TAG_VIDEO +#include <utils/Trace.h> + +#include <sys/time.h> + +#include <binder/IServiceManager.h> +#include <gui/ISurfaceComposer.h> +#include <ui/DisplayStatInfo.h> + +#include <media/stagefright/foundation/ADebug.h> + +#include "VideoFrameScheduler.h" + +namespace android { + +static const nsecs_t kNanosIn1s = 1000000000; + +template<class T> +inline static const T divRound(const T &nom, const T &den) { + if ((nom >= 0) ^ (den >= 0)) { + return (nom - den / 2) / den; + } else { + return (nom + den / 2) / den; + } +} + +template<class T> +inline static T abs(const T &a) { + return a < 0 ? -a : a; +} + +template<class T> +inline static const T &min(const T &a, const T &b) { + return a < b ? a : b; +} + +template<class T> +inline static const T &max(const T &a, const T &b) { + return a > b ? a : b; +} + +template<class T> +inline static T periodicError(const T &val, const T &period) { + T err = abs(val) % period; + return (err < (period / 2)) ? 
err : (period - err); +} + +template<class T> +static int compare(const T *lhs, const T *rhs) { + if (*lhs < *rhs) { + return -1; + } else if (*lhs > *rhs) { + return 1; + } else { + return 0; + } +} + +/* ======================================================================= */ +/* PLL */ +/* ======================================================================= */ + +static const size_t kMinSamplesToStartPrime = 3; +static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize; +static const size_t kMinSamplesToEstimatePeriod = 3; +static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize; + +static const size_t kPrecision = 12; +static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10; +static const int64_t kMultiplesThresholdDiv = 4; // 25% +static const int64_t kReFitThresholdDiv = 100; // 1% +static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s; // 1 sec +static const nsecs_t kMinPeriod = kNanosIn1s / 120; // 120Hz +static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec + +VideoFrameScheduler::PLL::PLL() + : mPeriod(-1), + mPhase(0), + mPrimed(false), + mSamplesUsedForPriming(0), + mLastTime(-1), + mNumSamples(0) { +} + +void VideoFrameScheduler::PLL::reset(float fps) { + //test(); + + mSamplesUsedForPriming = 0; + mLastTime = -1; + + // set up or reset video PLL + if (fps <= 0.f) { + mPeriod = -1; + mPrimed = false; + } else { + ALOGV("reset at %.1f fps", fps); + mPeriod = (nsecs_t)(1e9 / fps + 0.5); + mPrimed = true; + } + + restart(); +} + +// reset PLL but keep previous period estimate +void VideoFrameScheduler::PLL::restart() { + mNumSamples = 0; + mPhase = -1; +} + +#if 0 + +void VideoFrameScheduler::PLL::test() { + nsecs_t period = kNanosIn1s / 60; + mTimes[0] = 0; + mTimes[1] = period; + mTimes[2] = period * 3; + mTimes[3] = period * 4; + mTimes[4] = period * 7; + mTimes[5] = period * 8; + mTimes[6] = period * 10; + mTimes[7] = period * 12; + mNumSamples = 8; + int64_t a, b, err; 
+ fit(0, period * 12 / 7, 8, &a, &b, &err); + // a = 0.8(5)+ + // b = -0.14097(2)+ + // err = 0.2750578(703)+ + ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)", + (long long)a, (a / (float)(1 << kPrecision)), + (long long)b, (b / (float)(1 << kPrecision)), + (long long)err, (err / (float)(1 << (kPrecision * 2)))); +} + +#endif + +void VideoFrameScheduler::PLL::fit( + nsecs_t phase, nsecs_t period, size_t numSamplesToUse, + int64_t *a, int64_t *b, int64_t *err) { + if (numSamplesToUse > mNumSamples) { + numSamplesToUse = mNumSamples; + } + + int64_t sumX = 0; + int64_t sumXX = 0; + int64_t sumXY = 0; + int64_t sumYY = 0; + int64_t sumY = 0; + + int64_t x = 0; // x usually is in [0..numSamplesToUse) + nsecs_t lastTime; + for (size_t i = 0; i < numSamplesToUse; i++) { + size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize; + nsecs_t time = mTimes[ix]; + if (i > 0) { + x += divRound(time - lastTime, period); + } + // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision + // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during + // priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod + // while we are not refitting. 
+ int64_t y = divRound(time - phase, period >> kPrecision); + sumX += x; + sumY += y; + sumXX += x * x; + sumXY += x * y; + sumYY += y * y; + lastTime = time; + } + + int64_t div = numSamplesToUse * sumXX - sumX * sumX; + int64_t a_nom = numSamplesToUse * sumXY - sumX * sumY; + int64_t b_nom = sumXX * sumY - sumX * sumXY; + *a = divRound(a_nom, div); + *b = divRound(b_nom, div); + // don't use a and b directly as the rounding error is significant + *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div); + ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)", + numSamplesToUse, + (long long)*a, (*a / (float)(1 << kPrecision)), + (long long)*b, (*b / (float)(1 << kPrecision)), + (long long)*err, (*err / (float)(1 << (kPrecision * 2)))); +} + +void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) { + if (numSamplesToUse > mNumSamples) { + numSamplesToUse = mNumSamples; + } + CHECK(numSamplesToUse >= 3); // must have at least 3 samples + + // estimate video framerate from deltas between timestamps, and + // 2nd order deltas + Vector<nsecs_t> deltas; + nsecs_t lastTime, firstTime; + for (size_t i = 0; i < numSamplesToUse; ++i) { + size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize; + nsecs_t time = mTimes[index]; + if (i > 0) { + if (time - lastTime > kMinPeriod) { + //ALOGV("delta: %lld", (long long)(time - lastTime)); + deltas.push(time - lastTime); + } + } else { + firstTime = time; + } + lastTime = time; + } + deltas.sort(compare<nsecs_t>); + size_t numDeltas = deltas.size(); + if (numDeltas > 1) { + nsecs_t deltaMinLimit = min(deltas[0] / kMultiplesThresholdDiv, kMinPeriod); + nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv; + for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) { + if (deltas[i] > deltaMaxLimit) { + deltas.resize(i); + numDeltas = i; + break; + } + } + for (size_t i = 1; i < numDeltas; ++i) { + nsecs_t delta2nd = deltas[i] - deltas[i - 1]; + if (delta2nd >= deltaMinLimit) { + 
//ALOGV("delta2: %lld", (long long)(delta2nd)); + deltas.push(delta2nd); + } + } + } + + // use the one that yields the best match + int64_t bestScore; + for (size_t i = 0; i < deltas.size(); ++i) { + nsecs_t delta = deltas[i]; + int64_t score = 0; +#if 1 + // simplest score: number of deltas that are near multiples + size_t matches = 0; + for (size_t j = 0; j < deltas.size(); ++j) { + nsecs_t err = periodicError(deltas[j], delta); + if (err < delta / kMultiplesThresholdDiv) { + ++matches; + } + } + score = matches; +#if 0 + // could be weighed by the (1 - normalized error) + if (numSamplesToUse >= kMinSamplesToEstimatePeriod) { + int64_t a, b, err; + fit(firstTime, delta, numSamplesToUse, &a, &b, &err); + err = (1 << (2 * kPrecision)) - err; + score *= max(0, err); + } +#endif +#else + // or use the error as a negative score + if (numSamplesToUse >= kMinSamplesToEstimatePeriod) { + int64_t a, b, err; + fit(firstTime, delta, numSamplesToUse, &a, &b, &err); + score = -delta * err; + } +#endif + if (i == 0 || score > bestScore) { + bestScore = score; + mPeriod = delta; + mPhase = firstTime; + } + } + ALOGV("priming[%zu] phase:%lld period:%lld", numSamplesToUse, mPhase, mPeriod); +} + +nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) { + if (mLastTime >= 0 + // if time goes backward, or we skipped rendering + && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) { + restart(); + } + + mLastTime = time; + mTimes[mNumSamples % kHistorySize] = time; + ++mNumSamples; + + bool doFit = time > mRefitAt; + if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) { + prime(kMinSamplesToStopPrime); + ++mSamplesUsedForPriming; + doFit = true; + } + if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) { + if (mPhase < 0) { + // initialize phase to the current render time + mPhase = time; + doFit = true; + } else if (!doFit) { + int64_t err = periodicError(time - mPhase, mPeriod); + doFit = err > mPeriod / kReFitThresholdDiv; + } + + 
if (doFit) { + int64_t a, b, err; + mRefitAt = time + kRefitRefreshPeriod; + fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err); + mPhase += (mPeriod * b) >> kPrecision; + mPeriod = (mPeriod * a) >> kPrecision; + ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod); + + if (err < kErrorThreshold) { + if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) { + mPrimed = true; + } + } else { + mPrimed = false; + mSamplesUsedForPriming = 0; + } + } + } + return mPeriod; +} + +/* ======================================================================= */ +/* Frame Scheduler */ +/* ======================================================================= */ + +static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60; // 60Hz +static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s; // 1 sec + +VideoFrameScheduler::VideoFrameScheduler() + : mVsyncTime(0), + mVsyncPeriod(0), + mVsyncRefreshAt(0), + mLastVsyncTime(-1), + mTimeCorrection(0) { +} + +void VideoFrameScheduler::updateVsync() { + mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod; + mVsyncPeriod = 0; + mVsyncTime = 0; + + // TODO: schedule frames for the destination surface + // For now, surface flinger only schedules frames on the primary display + if (mComposer == NULL) { + String16 name("SurfaceFlinger"); + sp<IServiceManager> sm = defaultServiceManager(); + mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name)); + } + if (mComposer != NULL) { + DisplayStatInfo stats; + status_t res = mComposer->getDisplayStats(NULL /* display */, &stats); + if (res == OK) { + ALOGV("vsync time:%lld period:%lld", + (long long)stats.vsyncTime, (long long)stats.vsyncPeriod); + mVsyncTime = stats.vsyncTime; + mVsyncPeriod = stats.vsyncPeriod; + } else { + ALOGW("getDisplayStats returned %d", res); + } + } else { + ALOGW("could not get surface mComposer service"); + } +} + +void VideoFrameScheduler::init(float videoFps) { + updateVsync(); + + 
mLastVsyncTime = -1; + mTimeCorrection = 0; + + mPll.reset(videoFps); +} + +void VideoFrameScheduler::restart() { + mLastVsyncTime = -1; + mTimeCorrection = 0; + + mPll.restart(); +} + +nsecs_t VideoFrameScheduler::getVsyncPeriod() { + if (mVsyncPeriod > 0) { + return mVsyncPeriod; + } + return kDefaultVsyncPeriod; +} + +nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) { + nsecs_t origRenderTime = renderTime; + + nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC); + if (now >= mVsyncRefreshAt) { + updateVsync(); + } + + // without VSYNC info, there is nothing to do + if (mVsyncPeriod == 0) { + ALOGV("no vsync: render=%lld", (long long)renderTime); + return renderTime; + } + + // ensure vsync time is well before (corrected) render time + if (mVsyncTime > renderTime - 4 * mVsyncPeriod) { + mVsyncTime -= + ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod; + } + + // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime + // so this effectively becomes a rounding operation (to the _closest_ VSYNC.) + renderTime -= mVsyncPeriod / 2; + + const nsecs_t videoPeriod = mPll.addSample(origRenderTime); + if (videoPeriod > 0) { + // Smooth out rendering + size_t N = 12; + nsecs_t fiveSixthDev = + abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod) + / (mVsyncPeriod / 100); + // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. 
playing 50Hz on 60Hz) + if (fiveSixthDev < 12) { /* 12% / 6 = 2% */ + N = 20; + } + + nsecs_t offset = 0; + nsecs_t edgeRemainder = 0; + for (size_t i = 1; i <= N; i++) { + offset += + (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod; + edgeRemainder += (videoPeriod * i) % mVsyncPeriod; + } + mTimeCorrection += mVsyncPeriod / 2 - offset / N; + renderTime += mTimeCorrection; + nsecs_t correctionLimit = mVsyncPeriod * 3 / 5; + edgeRemainder = abs(edgeRemainder / N - mVsyncPeriod / 2); + if (edgeRemainder <= mVsyncPeriod / 3) { + correctionLimit /= 2; + } + + // estimate how many VSYNCs a frame will spend on the display + nsecs_t nextVsyncTime = + renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod); + if (mLastVsyncTime >= 0) { + size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod; + size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod); + bool vsyncsPerFrameAreNearlyConstant = + periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0; + + if (mTimeCorrection > correctionLimit && + (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) { + // remove a VSYNC + mTimeCorrection -= mVsyncPeriod / 2; + renderTime -= mVsyncPeriod / 2; + nextVsyncTime -= mVsyncPeriod; + --vsyncsForLastFrame; + } else if (mTimeCorrection < -correctionLimit && + (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) { + // add a VSYNC + mTimeCorrection += mVsyncPeriod / 2; + renderTime += mVsyncPeriod / 2; + nextVsyncTime += mVsyncPeriod; + ++vsyncsForLastFrame; + } + ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame); + } + mLastVsyncTime = nextVsyncTime; + } + + // align rendertime to the center between VSYNC edges + renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod; + renderTime += mVsyncPeriod / 2; + ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime); + ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000); + 
return renderTime; +} + +void VideoFrameScheduler::release() { + mComposer.clear(); +} + +VideoFrameScheduler::~VideoFrameScheduler() { + release(); +} + +} // namespace android + diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/media/libmediaplayerservice/VideoFrameScheduler.h new file mode 100644 index 0000000..19f0787 --- /dev/null +++ b/media/libmediaplayerservice/VideoFrameScheduler.h @@ -0,0 +1,98 @@ +/* + * Copyright 2014, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_FRAME_SCHEDULER_H_ +#define VIDEO_FRAME_SCHEDULER_H_ + +#include <utils/RefBase.h> +#include <utils/Timers.h> + +#include <media/stagefright/foundation/ABase.h> + +namespace android { + +struct ISurfaceComposer; + +struct VideoFrameScheduler : public RefBase { + VideoFrameScheduler(); + + // (re)initialize scheduler + void init(float videoFps = -1); + // use in case of video render-time discontinuity, e.g. 
seek + void restart(); + // get adjusted nanotime for a video frame render at renderTime + nsecs_t schedule(nsecs_t renderTime); + + // returns the vsync period for the main display + nsecs_t getVsyncPeriod(); + + void release(); + + static const size_t kHistorySize = 8; + +protected: + virtual ~VideoFrameScheduler(); + +private: + struct PLL { + PLL(); + + // reset PLL to new PLL + void reset(float fps = -1); + // keep current estimate, but restart phase + void restart(); + // returns period + nsecs_t addSample(nsecs_t time); + + private: + nsecs_t mPeriod; + nsecs_t mPhase; + + bool mPrimed; // have an estimate for the period + size_t mSamplesUsedForPriming; + + nsecs_t mLastTime; // last input time + nsecs_t mRefitAt; // next input time to fit at + + size_t mNumSamples; // can go past kHistorySize + nsecs_t mTimes[kHistorySize]; + + void test(); + void fit(nsecs_t phase, nsecs_t period, size_t numSamples, + int64_t *a, int64_t *b, int64_t *err); + void prime(size_t numSamples); + }; + + void updateVsync(); + + nsecs_t mVsyncTime; // vsync timing from display + nsecs_t mVsyncPeriod; + nsecs_t mVsyncRefreshAt; // next time to refresh timing info + + nsecs_t mLastVsyncTime; // estimated vsync time for last frame + nsecs_t mTimeCorrection; // running adjustment + + PLL mPll; // PLL for video frame rate based on render time + + sp<ISurfaceComposer> mComposer; + + DISALLOW_EVIL_CONSTRUCTORS(VideoFrameScheduler); +}; + +} // namespace android + +#endif // VIDEO_FRAME_SCHEDULER_H_ + diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk index 0dd2b61..676c0a6 100644 --- a/media/libmediaplayerservice/nuplayer/Android.mk +++ b/media/libmediaplayerservice/nuplayer/Android.mk @@ -19,6 +19,7 @@ LOCAL_C_INCLUDES := \ $(TOP)/frameworks/av/media/libstagefright/mpeg2ts \ $(TOP)/frameworks/av/media/libstagefright/rtsp \ $(TOP)/frameworks/av/media/libstagefright/timedtext \ + $(TOP)/frameworks/av/media/libmediaplayerservice \ 
$(TOP)/frameworks/native/include/media/openmax LOCAL_MODULE:= libstagefright_nuplayer diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp index a0870fd..bd75034 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp +++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp @@ -106,6 +106,10 @@ status_t NuPlayer::GenericSource::setDataSource( return OK; } +sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const { + return mFileMeta; +} + status_t NuPlayer::GenericSource::initFromDataSource() { sp<MediaExtractor> extractor; @@ -144,17 +148,22 @@ status_t NuPlayer::GenericSource::initFromDataSource() { checkDrmStatus(mDataSource); } - sp<MetaData> fileMeta = extractor->getMetaData(); - if (fileMeta != NULL) { + mFileMeta = extractor->getMetaData(); + if (mFileMeta != NULL) { int64_t duration; - if (fileMeta->findInt64(kKeyDuration, &duration)) { + if (mFileMeta->findInt64(kKeyDuration, &duration)) { mDurationUs = duration; } } int32_t totalBitrate = 0; - for (size_t i = 0; i < extractor->countTracks(); ++i) { + size_t numtracks = extractor->countTracks(); + if (numtracks == 0) { + return UNKNOWN_ERROR; + } + + for (size_t i = 0; i < numtracks; ++i) { sp<MediaSource> track = extractor->getTrack(i); sp<MetaData> meta = extractor->getTrackMetaData(i); @@ -464,6 +473,15 @@ void NuPlayer::GenericSource::resume() { mStarted = true; } +void NuPlayer::GenericSource::disconnect() { + if (mDataSource != NULL) { + // disconnect data source + if (mDataSource->flags() & DataSource::kIsCachingDataSource) { + static_cast<NuCachedSource2 *>(mDataSource.get())->disconnect(); + } + } +} + void NuPlayer::GenericSource::setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position) { if (mDecryptHandle != NULL) { mDrmManagerClient->setPlaybackStatus(mDecryptHandle, playbackStatus, position); @@ -942,7 +960,7 @@ status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select) 
{ ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex); sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); msg->setInt32("trackIndex", trackIndex); - msg->setInt32("select", trackIndex); + msg->setInt32("select", select); sp<AMessage> response; status_t err = msg->postAndAwaitResponse(&response); @@ -1247,6 +1265,8 @@ void NuPlayer::GenericSource::readBuffer( sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType, actualTimeUs); track->mPackets->queueAccessUnit(buffer); + formatChange = false; + seeking = false; ++numBuffers; } else if (err == WOULD_BLOCK) { break; diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h index c70c48e..24bb6af 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.h +++ b/media/libmediaplayerservice/nuplayer/GenericSource.h @@ -55,8 +55,12 @@ struct NuPlayer::GenericSource : public NuPlayer::Source { virtual void pause(); virtual void resume(); + virtual void disconnect(); + virtual status_t feedMoreTSData(); + virtual sp<MetaData> getFileFormatMeta() const; + virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit); virtual status_t getDuration(int64_t *durationUs); @@ -123,6 +127,7 @@ private: sp<DataSource> mDataSource; sp<NuCachedSource2> mCachedSource; sp<WVMExtractor> mWVMExtractor; + sp<MetaData> mFileMeta; DrmManagerClient *mDrmManagerClient; sp<DecryptHandle> mDecryptHandle; bool mStarted; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 9020a8d..dad480d 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -310,6 +310,16 @@ void NuPlayer::resume() { } void NuPlayer::resetAsync() { + if (mSource != NULL) { + // During a reset, the data source might be unresponsive already, we need to + // disconnect explicitly so that reads exit promptly. 
+ // We can't queue the disconnect request to the looper, as it might be + // queued behind a stuck read and never gets processed. + // Doing a disconnect outside the looper to allows the pending reads to exit + // (either successfully or with error). + mSource->disconnect(); + } + (new AMessage(kWhatReset, id()))->post(); } @@ -633,6 +643,13 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO); mRendererLooper->registerHandler(mRenderer); + sp<MetaData> meta = getFileMeta(); + int32_t rate; + if (meta != NULL + && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) { + mRenderer->setVideoFrameRate(rate); + } + postScanSources(); break; } @@ -1260,8 +1277,8 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { // Aggregate smaller buffers into a larger buffer. // The goal is to reduce power consumption. - // Unfortunately this does not work with the software AAC decoder. - bool doBufferAggregation = (audio && mOffloadAudio);; + // Note this will not work if the decoder requires one frame per buffer. + bool doBufferAggregation = (audio && mOffloadAudio); bool needMoreData = false; bool dropAccessUnit; @@ -1281,7 +1298,7 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { return err; } else if (err != OK) { if (err == INFO_DISCONTINUITY) { - if (mAggregateBuffer != NULL) { + if (doBufferAggregation && (mAggregateBuffer != NULL)) { // We already have some data so save this for later. 
mPendingAudioErr = err; mPendingAudioAccessUnit = accessUnit; @@ -1404,7 +1421,7 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { mAggregateBuffer->setRange(0, 0); // start empty } - if (mAggregateBuffer != NULL) { + if (doBufferAggregation && (mAggregateBuffer != NULL)) { int64_t timeUs; int64_t dummy; bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs); @@ -1453,7 +1470,7 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { mCCDecoder->decode(accessUnit); } - if (mAggregateBuffer != NULL) { + if (doBufferAggregation && (mAggregateBuffer != NULL)) { ALOGV("feedDecoderInputData() reply with aggregated buffer, %zu", mAggregateBuffer->size()); reply->setBuffer("buffer", mAggregateBuffer); @@ -1720,6 +1737,10 @@ status_t NuPlayer::selectTrack(size_t trackIndex, bool select) { return err; } +sp<MetaData> NuPlayer::getFileMeta() { + return mSource->getFileFormatMeta(); +} + void NuPlayer::schedulePollDuration() { sp<AMessage> msg = new AMessage(kWhatPollDuration, id()); msg->setInt32("generation", mPollDurationGeneration); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 2e951bd..7197e5f 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -67,6 +67,8 @@ struct NuPlayer : public AHandler { status_t getSelectedTrack(int32_t type, Parcel* reply) const; status_t selectTrack(size_t trackIndex, bool select); + sp<MetaData> getFileMeta(); + static const size_t kAggregateBufferSizeBytes; protected: diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp index 601cd40..cdb860c 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp @@ -465,7 +465,9 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) { 
size_t bufferIx; CHECK(msg->findSize("buffer-ix", &bufferIx)); if (msg->findInt32("render", &render) && render) { - err = mCodec->renderOutputBufferAndRelease(bufferIx); + int64_t timestampNs; + CHECK(msg->findInt64("timestampNs", &timestampNs)); + err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs); } else { err = mCodec->releaseOutputBuffer(bufferIx); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index 7dd54c1..7ec9876 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -45,6 +45,7 @@ NuPlayerDriver::NuPlayerDriver() mPlayerFlags(0), mAtEOS(false), mLooping(false), + mAutoLoop(false), mStartupSeekTimeUs(-1) { mLooper->setName("NuPlayerDriver Looper"); @@ -263,8 +264,22 @@ status_t NuPlayerDriver::start() { case STATE_PAUSED: case STATE_STOPPED_AND_PREPARED: { - mPlayer->resume(); - mPositionUs -= ALooper::GetNowUs() - mPauseStartedTimeUs; + if (mAtEOS) { + mPlayer->seekToAsync(0); + mAtEOS = false; + mPlayer->resume(); + mPositionUs = -1; + } else { + mPlayer->resume(); + if (mNotifyTimeRealUs != -1) { + // Pause time must be set if here by setPauseStartedTimeIfNeeded(). + //CHECK(mPauseStartedTimeUs != -1); + + // if no seek occurs, adjust our notify time so that getCurrentPosition() + // is continuous if read immediately after calling start(). + mNotifyTimeRealUs += ALooper::GetNowUs() - mPauseStartedTimeUs; + } + } break; } @@ -371,15 +386,36 @@ status_t NuPlayerDriver::getCurrentPosition(int *msec) { Mutex::Autolock autoLock(mLock); if (mPositionUs < 0) { + // mPositionUs is the media time. + // It is negative under these cases + // (1) == -1 after reset, or very first playback, no stream notification yet. + // (2) == -1 start after end of stream, no stream notification yet. + // (3) == large negative # after ~292,471 years of continuous playback. 
+ + //CHECK_EQ(mPositionUs, -1); *msec = 0; } else if (mNotifyTimeRealUs == -1) { + // A seek has occurred just occurred, no stream notification yet. + // mPositionUs (>= 0) is the new media position. *msec = mPositionUs / 1000; } else { + // mPosition must be valid (i.e. >= 0) by the first check above. + // We're either playing or have pause time set: mPauseStartedTimeUs is >= 0 + //LOG_ALWAYS_FATAL_IF( + // !isPlaying() && mPauseStartedTimeUs < 0, + // "Player in non-playing mState(%d) and mPauseStartedTimeUs(%lld) < 0", + // mState, (long long)mPauseStartedTimeUs); + ALOG_ASSERT(mNotifyTimeRealUs >= 0); int64_t nowUs = (isPlaying() ? ALooper::GetNowUs() : mPauseStartedTimeUs); *msec = (mPositionUs + nowUs - mNotifyTimeRealUs + 500ll) / 1000; + // It is possible for *msec to be negative if the media position is > 596 hours. + // but we turn on this checking in NDEBUG == 0 mode. + ALOG_ASSERT(*msec >= 0); + ALOGV("getCurrentPosition nowUs(%lld)", (long long)nowUs); } - + ALOGV("getCurrentPosition returning(%d) mPositionUs(%lld) mNotifyRealTimeUs(%lld)", + *msec, (long long)mPositionUs, (long long)mNotifyTimeRealUs); return OK; } @@ -498,6 +534,7 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) { void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) { mPlayer->setAudioSink(audioSink); + mAudioSink = audioSink; } status_t NuPlayerDriver::setParameter( @@ -627,7 +664,8 @@ void NuPlayerDriver::notifyListener_l( case MEDIA_PLAYBACK_COMPLETE: { if (mState != STATE_RESET_IN_PROGRESS) { - if (mLooping) { + if (mLooping || (mAutoLoop + && (mAudioSink == NULL || mAudioSink->realtime()))) { mPlayer->seekToAsync(0); break; } @@ -693,6 +731,13 @@ void NuPlayerDriver::notifyPrepareCompleted(status_t err) { } } + sp<MetaData> meta = mPlayer->getFileMeta(); + int32_t loop; + if (meta != NULL + && meta->findInt32(kKeyAutoLoop, &loop) && loop != 0) { + mAutoLoop = true; + } + mCondition.broadcast(); } diff --git 
a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index e81d605..f2bd431 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -114,10 +114,12 @@ private: sp<ALooper> mLooper; sp<NuPlayer> mPlayer; + sp<AudioSink> mAudioSink; uint32_t mPlayerFlags; bool mAtEOS; bool mLooping; + bool mAutoLoop; int64_t mStartupSeekTimeUs; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 067784b..a8c8818 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -26,6 +26,8 @@ #include <media/stagefright/MediaErrors.h> #include <media/stagefright/MetaData.h> +#include <VideoFrameScheduler.h> + #include <inttypes.h> namespace android { @@ -45,7 +47,7 @@ NuPlayer::Renderer::Renderer( mDrainVideoQueuePending(false), mAudioQueueGeneration(0), mVideoQueueGeneration(0), - mFirstAudioTimeUs(-1), + mFirstAnchorTimeMediaUs(-1), mAnchorTimeMediaUs(-1), mAnchorTimeRealUs(-1), mFlushingAudio(false), @@ -54,12 +56,12 @@ NuPlayer::Renderer::Renderer( mHasVideo(false), mSyncQueues(false), mPaused(false), + mVideoSampleReceived(false), mVideoRenderingStarted(false), mVideoRenderingStartGeneration(0), mAudioRenderingStartGeneration(0), mLastPositionUpdateUs(-1ll), - mVideoLateByUs(0ll), - mVideoSampleReceived(false) { + mVideoLateByUs(0ll) { } NuPlayer::Renderer::~Renderer() { @@ -115,6 +117,7 @@ void NuPlayer::Renderer::signalTimeDiscontinuity() { Mutex::Autolock autoLock(mLock); // CHECK(mAudioQueue.empty()); // CHECK(mVideoQueue.empty()); + mFirstAnchorTimeMediaUs = -1; mAnchorTimeMediaUs = -1; mAnchorTimeRealUs = -1; mSyncQueues = false; @@ -136,6 +139,12 @@ void NuPlayer::Renderer::resume() { (new AMessage(kWhatResume, id()))->post(); } +void NuPlayer::Renderer::setVideoFrameRate(float fps) { + 
sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id()); + msg->setFloat("frame-rate", fps); + msg->post(); +} + void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatStopAudioSink: @@ -236,6 +245,14 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatSetVideoFrameRate: + { + float fps; + CHECK(msg->findFloat("frame-rate", &fps)); + onSetVideoFrameRate(fps); + break; + } + case kWhatAudioOffloadTearDown: { onAudioOffloadTearDown(); @@ -339,19 +356,16 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { int64_t mediaTimeUs; CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); - if (mFirstAudioTimeUs == -1) { - mFirstAudioTimeUs = mediaTimeUs; + if (mFirstAnchorTimeMediaUs == -1) { + mFirstAnchorTimeMediaUs = mediaTimeUs; } - uint32_t numFramesPlayed; - CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); + int64_t nowUs = ALooper::GetNowUs(); + mAnchorTimeMediaUs = + mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs); + mAnchorTimeRealUs = nowUs; - // TODO: figure out how to calculate initial latency. - // Otherwise, the initial time is not correct till the first sample - // is played. 
- mAnchorTimeMediaUs = mFirstAudioTimeUs - + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll; - mAnchorTimeRealUs = ALooper::GetNowUs(); + notifyPosition(); } size_t copy = entry->mBuffer->size() - entry->mOffset; @@ -374,10 +388,6 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { notifyIfMediaRenderingStarted(); } - if (sizeCopied != 0) { - notifyPosition(); - } - if (hasEOS) { (new AMessage(kWhatStopAudioSink, id()))->post(); } @@ -413,7 +423,7 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { // EOS int64_t postEOSDelayUs = 0; if (mAudioSink->needsTrailingPadding()) { - postEOSDelayUs = getAudioPendingPlayoutUs() + 1000 * mAudioSink->latency(); + postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs()); } notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs); @@ -426,10 +436,15 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { int64_t mediaTimeUs; CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); + if (mFirstAnchorTimeMediaUs == -1) { + mFirstAnchorTimeMediaUs = mediaTimeUs; + } mAnchorTimeMediaUs = mediaTimeUs; - mAnchorTimeRealUs = ALooper::GetNowUs() - + getAudioPendingPlayoutUs() + 1000 * mAudioSink->latency() / 2; + int64_t nowUs = ALooper::GetNowUs(); + mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs); + + notifyPosition(); } size_t copy = entry->mBuffer->size() - entry->mOffset; @@ -478,17 +493,13 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { break; } } - notifyPosition(); - return !mAudioQueue.empty(); } -int64_t NuPlayer::Renderer::getAudioPendingPlayoutUs() { - uint32_t numFramesPlayed; - CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); - - uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed; - return numFramesPendingPlayout * mAudioSink->msecsPerFrame() * 1000; +int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) { + int64_t 
writtenAudioDurationUs = + mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame(); + return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs); } void NuPlayer::Renderer::postDrainVideoQueue() { @@ -507,37 +518,48 @@ void NuPlayer::Renderer::postDrainVideoQueue() { sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); msg->setInt32("generation", mVideoQueueGeneration); - int64_t delayUs; - if (entry.mBuffer == NULL) { // EOS doesn't carry a timestamp. - delayUs = 0; - } else if (mFlags & FLAG_REAL_TIME) { + msg->post(); + mDrainVideoQueuePending = true; + return; + } + + int64_t delayUs; + int64_t nowUs = ALooper::GetNowUs(); + int64_t realTimeUs; + if (mFlags & FLAG_REAL_TIME) { int64_t mediaTimeUs; CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); - - delayUs = mediaTimeUs - ALooper::GetNowUs(); + realTimeUs = mediaTimeUs; } else { int64_t mediaTimeUs; CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) { + mFirstAnchorTimeMediaUs = mediaTimeUs; + } if (mAnchorTimeMediaUs < 0) { - delayUs = 0; - if (!mHasAudio) { mAnchorTimeMediaUs = mediaTimeUs; - mAnchorTimeRealUs = ALooper::GetNowUs(); + mAnchorTimeRealUs = nowUs; + notifyPosition(); } + realTimeUs = nowUs; } else { - int64_t realTimeUs = + realTimeUs = (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs; - - delayUs = realTimeUs - ALooper::GetNowUs(); } } + realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000; + int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000); + + delayUs = realTimeUs - nowUs; + ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs); - msg->post(delayUs); + // post 2 display refreshes before rendering is due + msg->post(delayUs > twoVsyncsUs ? 
delayUs - twoVsyncsUs : 0); mDrainVideoQueuePending = true; } @@ -558,8 +580,6 @@ void NuPlayer::Renderer::onDrainVideoQueue() { entry = NULL; mVideoLateByUs = 0ll; - - notifyPosition(); return; } @@ -591,6 +611,7 @@ void NuPlayer::Renderer::onDrainVideoQueue() { mVideoLateByUs = 0ll; } + entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll); entry->mNotifyConsumed->setInt32("render", !tooLate); entry->mNotifyConsumed->post(); mVideoQueue.erase(mVideoQueue.begin()); @@ -605,8 +626,6 @@ void NuPlayer::Renderer::onDrainVideoQueue() { } notifyIfMediaRenderingStarted(); } - - notifyPosition(); } void NuPlayer::Renderer::notifyVideoRenderingStart() { @@ -635,6 +654,10 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { mHasAudio = true; } else { mHasVideo = true; + if (mVideoScheduler == NULL) { + mVideoScheduler = new VideoFrameScheduler(); + mVideoScheduler->init(); + } } if (dropBufferWhileFlushing(audio, msg)) { @@ -783,7 +806,7 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { prepareForMediaRenderingStart(); if (offloadingAudio()) { - mFirstAudioTimeUs = -1; + mFirstAnchorTimeMediaUs = -1; } } @@ -800,6 +823,10 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { mDrainVideoQueuePending = false; ++mVideoQueueGeneration; + if (mVideoScheduler != NULL) { + mVideoScheduler->restart(); + } + prepareForMediaRenderingStart(); } @@ -871,9 +898,11 @@ void NuPlayer::Renderer::onDisableOffloadAudio() { } void NuPlayer::Renderer::notifyPosition() { - if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) { - return; - } + // notifyPosition() must be called only after setting mAnchorTimeRealUs + // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position. + //CHECK_GE(mAnchorTimeRealUs, 0); + //CHECK_GE(mAnchorTimeMediaUs, 0); + //CHECK(!mPaused || !mHasAudio); // video-only does display in paused mode. 
int64_t nowUs = ALooper::GetNowUs(); @@ -885,6 +914,18 @@ void NuPlayer::Renderer::notifyPosition() { int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; + //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)" + // " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)", + // (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs, + // (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs); + + // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(), + // positionUs may be less than the first media time. This is avoided + // here to prevent potential retrograde motion of the position bar + // when starting up after a seek. + if (positionUs < mFirstAnchorTimeMediaUs) { + positionUs = mFirstAnchorTimeMediaUs; + } sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatPosition); notify->setInt64("positionUs", positionUs); @@ -937,17 +978,87 @@ void NuPlayer::Renderer::onResume() { } } -void NuPlayer::Renderer::onAudioOffloadTearDown() { +void NuPlayer::Renderer::onSetVideoFrameRate(float fps) { + if (mVideoScheduler == NULL) { + mVideoScheduler = new VideoFrameScheduler(); + } + mVideoScheduler->init(fps); +} + +// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs() +// as it acquires locks and may query the audio driver. +// +// Some calls are not needed since notifyPosition() doesn't always deliver a message. +// Some calls could conceivably retrieve extrapolated data instead of +// accessing getTimestamp() or getPosition() every time a data buffer with +// a media time is received. 
+// +int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { uint32_t numFramesPlayed; - CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); + int64_t numFramesPlayedAt; + AudioTimestamp ts; + static const int64_t kStaleTimestamp100ms = 100000; + + status_t res = mAudioSink->getTimestamp(ts); + if (res == OK) { // case 1: mixing audio tracks and offloaded tracks. + numFramesPlayed = ts.mPosition; + numFramesPlayedAt = + ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; + const int64_t timestampAge = nowUs - numFramesPlayedAt; + if (timestampAge > kStaleTimestamp100ms) { + // This is an audio FIXME. + // getTimestamp returns a timestamp which may come from audio mixing threads. + // After pausing, the MixerThread may go idle, thus the mTime estimate may + // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms, + // the max latency should be about 25ms with an average around 12ms (to be verified). + // For safety we use 100ms. + ALOGW("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)", + (long long)nowUs, (long long)numFramesPlayedAt); + numFramesPlayedAt = nowUs - kStaleTimestamp100ms; + } + //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); + } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track + numFramesPlayed = 0; + numFramesPlayedAt = nowUs; + //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", + // numFramesPlayed, (long long)numFramesPlayedAt); + } else { // case 3: transitory at new track or audio fast tracks. + res = mAudioSink->getPosition(&numFramesPlayed); + CHECK_EQ(res, (status_t)OK); + numFramesPlayedAt = nowUs; + numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ + //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); + } + + // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 
+ //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test + int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame() + + nowUs - numFramesPlayedAt; + if (durationUs < 0) { + // Occurs when numFramesPlayed position is very small and the following: + // (1) In case 1, the time nowUs is computed before getTimestamp() is called and + // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed. + // (2) In case 3, using getPosition and adding mAudioSink->latency() to + // numFramesPlayedAt, by a time amount greater than numFramesPlayed. + // + // Both of these are transitory conditions. + ALOGW("getPlayedOutAudioDurationUs: negative timestamp %lld set to zero", (long long)durationUs); + durationUs = 0; + } + ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)", + (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt); + return durationUs; +} +void NuPlayer::Renderer::onAudioOffloadTearDown() { int64_t firstAudioTimeUs; { Mutex::Autolock autoLock(mLock); - firstAudioTimeUs = mFirstAudioTimeUs; + firstAudioTimeUs = mFirstAnchorTimeMediaUs; } - int64_t currentPositionUs = firstAudioTimeUs - + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll; + + int64_t currentPositionUs = + firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs()); mAudioSink->stop(); mAudioSink->flush(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h index 5c7d2d7..e28071f 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -23,6 +23,7 @@ namespace android { struct ABuffer; +struct VideoFrameScheduler; struct NuPlayer::Renderer : public AHandler { enum Flags { @@ -56,6 +57,8 @@ struct NuPlayer::Renderer : public AHandler { void pause(); void resume(); + void setVideoFrameRate(float fps); + enum 
{ kWhatEOS = 'eos ', kWhatFlushComplete = 'fluC', @@ -82,6 +85,7 @@ private: kWhatResume = 'resm', kWhatStopAudioSink = 'stpA', kWhatDisableOffloadAudio = 'noOA', + kWhatSetVideoFrameRate = 'sVFR', }; struct QueueEntry { @@ -100,13 +104,14 @@ private: List<QueueEntry> mAudioQueue; List<QueueEntry> mVideoQueue; uint32_t mNumFramesWritten; + sp<VideoFrameScheduler> mVideoScheduler; bool mDrainAudioQueuePending; bool mDrainVideoQueuePending; int32_t mAudioQueueGeneration; int32_t mVideoQueueGeneration; - int64_t mFirstAudioTimeUs; + int64_t mFirstAnchorTimeMediaUs; int64_t mAnchorTimeMediaUs; int64_t mAnchorTimeRealUs; @@ -130,7 +135,8 @@ private: size_t fillAudioBuffer(void *buffer, size_t size); bool onDrainAudioQueue(); - int64_t getAudioPendingPlayoutUs(); + int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs); + int64_t getPlayedOutAudioDurationUs(int64_t nowUs); void postDrainAudioQueue_l(int64_t delayUs = 0); void onDrainVideoQueue(); @@ -146,6 +152,7 @@ private: void onDisableOffloadAudio(); void onPause(); void onResume(); + void onSetVideoFrameRate(float fps); void onAudioOffloadTearDown(); void notifyEOS(bool audio, status_t finalResult, int64_t delayUs = 0); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index 7ccf3b1..2f06c31 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -67,12 +67,16 @@ struct NuPlayer::Source : public AHandler { virtual void pause() {} virtual void resume() {} + // Explicitly disconnect the underling data source + virtual void disconnect() {} + // Returns OK iff more data was available, // an error or ERROR_END_OF_STREAM if not. 
virtual status_t feedMoreTSData() = 0; virtual sp<AMessage> getFormat(bool audio); virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; } + virtual sp<MetaData> getFileFormatMeta() const { return NULL; } virtual status_t dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) = 0; |