-rw-r--r--  include/media/stagefright/MediaSync.h                              |  13
-rw-r--r--  include/media/stagefright/VideoFrameScheduler.h                    |   6  (renamed from media/libmediaplayerservice/VideoFrameScheduler.h)
-rw-r--r--  media/libmediaplayerservice/Android.mk                             |   1
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp          |   3
-rw-r--r--  media/libstagefright/Android.mk                                    |   1
-rw-r--r--  media/libstagefright/MediaSync.cpp                                 | 128
-rw-r--r--  media/libstagefright/VideoFrameScheduler.cpp                       |  20  (renamed from media/libmediaplayerservice/VideoFrameScheduler.cpp)

7 files changed, 127 insertions(+), 45 deletions(-)
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
index 1eef211..1b7d7e8 100644
--- a/include/media/stagefright/MediaSync.h
+++ b/include/media/stagefright/MediaSync.h
@@ -37,6 +37,7 @@ class GraphicBuffer;
 class IGraphicBufferConsumer;
 class IGraphicBufferProducer;
 struct MediaClock;
+struct VideoFrameScheduler;
 
 // MediaSync manages media playback and its synchronization to a media clock
 // source. It can be also used for video-only playback.
@@ -131,11 +132,10 @@ protected:
 private:
     enum {
-        kWhatDrainVideo = 'dVid',
+        kWhatDrainVideo          = 'dVid',
+        kWhatCheckFrameAvailable = 'cFrA',
     };
 
-    static const int MAX_OUTSTANDING_BUFFERS = 2;
-
     // This is a thin wrapper class that lets us listen to
     // IConsumerListener::onFrameAvailable from mInput.
     class InputListener : public BnConsumerListener,
@@ -194,6 +194,7 @@ private:
     sp<IGraphicBufferConsumer> mInput;
     sp<IGraphicBufferProducer> mOutput;
     int mUsageFlagsFromOutput;
+    uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
 
     sp<AudioTrack> mAudioTrack;
     uint32_t mNativeSampleRateInHz;
@@ -202,6 +203,7 @@
     int64_t mNextBufferItemMediaUs;
 
     List<BufferItem> mBufferItems;
+    sp<VideoFrameScheduler> mFrameScheduler;
 
     // Keep track of buffers received from |mInput|. This is needed because
     // it's possible the consumer of |mOutput| could return a different
@@ -242,8 +244,9 @@ private:
     // onBufferReleasedByOutput releases a buffer back to the input.
     void onFrameAvailableFromInput();
 
-    // Send |bufferItem| to the output for rendering.
-    void renderOneBufferItem_l(const BufferItem &bufferItem);
+    // Send |bufferItem| to the output for rendering. If this is not the only
+    // buffer sent for rendering, check for any dropped frames in |checkInUs| us.
+    void renderOneBufferItem_l(const BufferItem &bufferItem, int64_t checkInUs);
 
     // This implements the onBufferReleased callback from IProducerListener.
     // It gets called from an OutputListener.
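
The header changes above only declare the new plumbing (the forward-declared VideoFrameScheduler, mMaxAcquiredBufferCount, the kWhatCheckFrameAvailable message, and the checkInUs parameter on renderOneBufferItem_l); the behavior itself lands in MediaSync.cpp below. As a rough caller-side sketch of the frame-rate hint path this plumbing feeds — not part of the patch, with the helper name and surrounding setup assumed — it could be exercised like this:

    #define LOG_TAG "MediaSyncFrameRateSketch"
    #include <utils/Log.h>
    #include <media/stagefright/MediaSync.h>

    using namespace android;

    // Hypothetical helper: connect a display surface to an existing MediaSync
    // and use the frame-rate hint API touched by this change.
    static void configureFrameRate(const sp<MediaSync> &sync,
                                   const sp<IGraphicBufferProducer> &display) {
        // setSurface() connects the output; on the first successful connect the
        // patched code also creates and init()s the VideoFrameScheduler.
        if (sync->setSurface(display) != OK) {
            return;
        }

        // Seed the scheduler with the nominal content rate.
        sync->setVideoFrameRateHint(29.97f);

        // Once enough frames have gone through the scheduler, the measured rate
        // is reported; -1.f means it is not primed yet.
        float measured = sync->getVideoFrameRate();
        if (measured > 0.f) {
            ALOGV("measured video frame rate: %.2f fps", measured);
        }
    }
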
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/include/media/stagefright/VideoFrameScheduler.h
index b1765c9..9d97dfd 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.h
+++ b/include/media/stagefright/VideoFrameScheduler.h
@@ -39,6 +39,9 @@ struct VideoFrameScheduler : public RefBase {
     // returns the vsync period for the main display
     nsecs_t getVsyncPeriod();
 
+    // returns the current frames-per-second, or 0.f if not primed
+    float getFrameRate();
+
     void release();
 
     static const size_t kHistorySize = 8;
@@ -54,8 +57,9 @@ private:
         void reset(float fps = -1);
         // keep current estimate, but restart phase
         void restart();
-        // returns period
+        // returns period or 0 if not yet primed
         nsecs_t addSample(nsecs_t time);
+        nsecs_t getPeriod() const;
 
     private:
         nsecs_t mPeriod;
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 7f0cca2..4d1b587 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -21,7 +21,6 @@ LOCAL_SRC_FILES:=               \
     StagefrightPlayer.cpp       \
     StagefrightRecorder.cpp     \
     TestPlayerStub.cpp          \
-    VideoFrameScheduler.cpp     \
 
 LOCAL_SHARED_LIBRARIES :=       \
     libbinder                   \
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 13a7d94..767417b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -29,8 +29,7 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
-
-#include <VideoFrameScheduler.h>
+#include <media/stagefright/VideoFrameScheduler.h>
 
 #include <inttypes.h>
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 69128bd..b86c749 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -64,6 +64,7 @@ LOCAL_SRC_FILES:=                         \
         TimedEventQueue.cpp               \
         Utils.cpp                         \
         VBRISeeker.cpp                    \
+        VideoFrameScheduler.cpp           \
         WAVExtractor.cpp                  \
         WVMExtractor.cpp                  \
         XINGSeeker.cpp                    \
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 52077a7..455db42 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -25,6 +25,7 @@
 #include <media/AudioTrack.h>
 #include <media/stagefright/MediaClock.h>
 #include <media/stagefright/MediaSync.h>
+#include <media/stagefright/VideoFrameScheduler.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -50,6 +51,7 @@ MediaSync::MediaSync()
         mReleaseCondition(),
         mNumOutstandingBuffers(0),
         mUsageFlagsFromOutput(0),
+        mMaxAcquiredBufferCount(1),
         mNativeSampleRateInHz(0),
         mNumFramesWritten(0),
         mHasAudio(false),
@@ -121,6 +123,11 @@ status_t MediaSync::setSurface(const sp<IGraphicBufferProducer> &output) {
             ALOGE("setSurface: failed to connect (%d)", status);
             return status;
         }
+
+        if (mFrameScheduler == NULL) {
+            mFrameScheduler = new VideoFrameScheduler();
+            mFrameScheduler->init();
+        }
     }
 
     if (mOutput != NULL) {
@@ -209,6 +216,12 @@ status_t MediaSync::createInputSurface(
         bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput);
         *outBufferProducer = bufferProducer;
         mInput = bufferConsumer;
+
+        // set undequeued buffer count
+        int minUndequeuedBuffers;
+        mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
+        mMaxAcquiredBufferCount = minUndequeuedBuffers;
+        bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
     }
     return status;
 }
@@ -326,12 +339,26 @@ void MediaSync::setName(const AString &name) {
 }
 
 status_t MediaSync::setVideoFrameRateHint(float rate) {
-    // ignored until we add the FrameScheduler
-    return rate >= 0.f ? OK : BAD_VALUE;
+    Mutex::Autolock lock(mMutex);
+    if (rate < 0.f) {
+        return BAD_VALUE;
+    }
+    if (mFrameScheduler != NULL) {
+        mFrameScheduler->init(rate);
+    }
+    return OK;
 }
 
 float MediaSync::getVideoFrameRate() {
-    // we don't know the frame rate
+    Mutex::Autolock lock(mMutex);
+    if (mFrameScheduler != NULL) {
+        float fps = mFrameScheduler->getFrameRate();
+        if (fps > 0.f) {
+            return fps;
+        }
+    }
+
+    // we don't have or know the frame rate
     return -1.f;
 }
 
@@ -470,7 +497,7 @@ int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) {
         CHECK_EQ(res, (status_t)OK);
         numFramesPlayedAt = nowUs;
         numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
-        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+        //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
     }
 
     //can't be negative until 12.4 hrs, test.
@@ -510,18 +537,30 @@ void MediaSync::onDrainVideo_l() {
         int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
         int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);
 
-        if (itemRealUs <= nowUs) {
+        // adjust video frame PTS based on vsync
+        itemRealUs = mFrameScheduler->schedule(itemRealUs * 1000) / 1000;
+        int64_t oneVsyncUs = (mFrameScheduler->getVsyncPeriod() / 1000);
+        int64_t twoVsyncsUs = oneVsyncUs * 2;
+
+        // post 2 display refreshes before rendering is due
+        if (itemRealUs <= nowUs + twoVsyncsUs) {
+            ALOGV("adjusting PTS from %lld to %lld",
+                    (long long)bufferItem->mTimestamp / 1000, (long long)itemRealUs);
+            bufferItem->mTimestamp = itemRealUs * 1000;
+            bufferItem->mIsAutoTimestamp = false;
+
             if (mHasAudio) {
                 if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
-                    renderOneBufferItem_l(*bufferItem);
+                    renderOneBufferItem_l(*bufferItem, nowUs + oneVsyncUs - itemRealUs);
                 } else {
                     // too late.
                     returnBufferToInput_l(
                             bufferItem->mGraphicBuffer, bufferItem->mFence);
+                    mFrameScheduler->restart();
                 }
             } else {
                 // always render video buffer in video-only mode.
-                renderOneBufferItem_l(*bufferItem);
+                renderOneBufferItem_l(*bufferItem, nowUs + oneVsyncUs - itemRealUs);
 
                 // smooth out videos >= 10fps
                 mMediaClock->updateAnchor(
@@ -534,7 +573,7 @@
             if (mNextBufferItemMediaUs == -1
                     || mNextBufferItemMediaUs > itemMediaUs) {
                 sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
-                msg->post(itemRealUs - nowUs);
+                msg->post(itemRealUs - nowUs - twoVsyncsUs);
                 mNextBufferItemMediaUs = itemMediaUs;
             }
             break;
@@ -545,10 +584,15 @@
 void MediaSync::onFrameAvailableFromInput() {
     Mutex::Autolock lock(mMutex);
 
+    const static nsecs_t kAcquireWaitTimeout = 2000000000; // 2 seconds
+
     // If there are too many outstanding buffers, wait until a buffer is
     // released back to the input in onBufferReleased.
-    while (mNumOutstandingBuffers >= MAX_OUTSTANDING_BUFFERS) {
-        mReleaseCondition.wait(mMutex);
+    // NOTE: BufferQueue allows dequeuing maxAcquiredBufferCount + 1 buffers
+    while (mNumOutstandingBuffers > mMaxAcquiredBufferCount && !mIsAbandoned) {
+        if (mReleaseCondition.waitRelative(mMutex, kAcquireWaitTimeout) != OK) {
+            ALOGI("still waiting to release a buffer before acquire");
+        }
 
         // If the sync is abandoned while we are waiting, the release
         // condition variable will be broadcast, and we should just return
@@ -582,6 +626,7 @@ void MediaSync::onFrameAvailableFromInput() {
     if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
         // Something is wrong since this buffer should be at our hands, bail.
+        ALOGE("received buffer multiple times from input");
         mInput->consumerDisconnect();
         onAbandoned_l(true /* isInput */);
         return;
     }
@@ -595,7 +640,7 @@
     }
 }
 
-void MediaSync::renderOneBufferItem_l( const BufferItem &bufferItem) {
+void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem, int64_t checkInUs) {
     IGraphicBufferProducer::QueueBufferInput queueInput(
             bufferItem.mTimestamp,
             bufferItem.mIsAutoTimestamp,
@@ -635,6 +680,12 @@
     mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);
 
     ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
+
+    // If we have already queued more than one buffer, check for any free buffers in case
+    // one of them were dropped - as BQ does not signal onBufferReleased in that case.
+    if (mBuffersSentToOutput.size() > 1) {
+        (new AMessage(kWhatCheckFrameAvailable, this))->post(checkInUs);
+    }
 }
 
 void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
@@ -646,32 +697,38 @@ void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
     sp<GraphicBuffer> buffer;
     sp<Fence> fence;
 
-    status_t status = mOutput->detachNextBuffer(&buffer, &fence);
-    ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
+    status_t status;
+    // NOTE: This is a workaround for a BufferQueue bug where onBufferReleased is
+    // called only for released buffers, but not for buffers that were dropped during
+    // acquire. Dropped buffers can still be detached as they are on the free list.
+    // TODO: remove if released callback happens also for dropped buffers
+    while ((status = mOutput->detachNextBuffer(&buffer, &fence)) != NO_MEMORY) {
+        ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
 
-    if (status == NO_INIT) {
-        // If the output has been abandoned, we can't do anything else,
-        // since buffer is invalid.
-        onAbandoned_l(false /* isInput */);
-        return;
-    }
+        if (status == NO_INIT) {
+            // If the output has been abandoned, we can't do anything else,
+            // since buffer is invalid.
+            onAbandoned_l(false /* isInput */);
+            return;
+        }
 
-    ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
+        ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
 
-    // If we've been abandoned, we can't return the buffer to the input, so just
-    // move on.
-    if (mIsAbandoned) {
-        return;
-    }
+        // If we've been abandoned, we can't return the buffer to the input, so just
+        // move on.
+        if (mIsAbandoned) {
+            return;
+        }
 
-    ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
-    if (ix < 0) {
-        // The buffer is unknown, maybe leftover, ignore.
-        return;
-    }
-    mBuffersSentToOutput.removeItemsAt(ix);
+        ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
+        if (ix < 0) {
+            // The buffer is unknown, maybe leftover, ignore.
+            return;
+        }
+        mBuffersSentToOutput.removeItemsAt(ix);
 
-    returnBufferToInput_l(buffer, fence);
+        returnBufferToInput_l(buffer, fence);
+    }
 }
 
 void MediaSync::returnBufferToInput_l(
@@ -679,6 +736,7 @@ void MediaSync::returnBufferToInput_l(
     ssize_t ix = mBuffersFromInput.indexOfKey(buffer->getId());
     if (ix < 0) {
         // The buffer is unknown, something is wrong, bail.
+        ALOGE("output returned unknown buffer");
         mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
         onAbandoned_l(false /* isInput */);
         return;
@@ -741,6 +799,12 @@ void MediaSync::onMessageReceived(const sp<AMessage> &msg) {
             break;
         }
 
+        case kWhatCheckFrameAvailable:
+        {
+            onBufferReleasedByOutput(mOutput);
+            break;
+        }
+
         default:
             TRESPASS();
             break;
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
index ce5f5fe..5fe9bf9 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -28,8 +28,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
-
-#include "VideoFrameScheduler.h"
+#include <media/stagefright/VideoFrameScheduler.h>
 
 namespace android {
 
@@ -56,7 +55,7 @@ static const size_t kMinSamplesToEstimatePeriod = 3;
 static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;
 
 static const size_t kPrecision = 12;
-static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
+static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
 static const int64_t kMultiplesThresholdDiv = 4;            // 25%
 static const int64_t kReFitThresholdDiv = 100;              // 1%
 static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s;     // 1 sec
@@ -258,7 +257,8 @@ void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
             mPhase = firstTime;
         }
     }
-    ALOGV("priming[%zu] phase:%lld period:%lld", numSamplesToUse, mPhase, mPeriod);
+    ALOGV("priming[%zu] phase:%lld period:%lld",
+            numSamplesToUse, (long long)mPhase, (long long)mPeriod);
 }
 
 nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
@@ -316,6 +316,10 @@ nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
     return mPeriod;
 }
 
+nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
+    return mPrimed ? mPeriod : 0;
+}
+
 /* ======================================================================= */
 /*                             Frame Scheduler                             */
 /* ======================================================================= */
@@ -382,6 +386,14 @@ nsecs_t VideoFrameScheduler::getVsyncPeriod() {
     return kDefaultVsyncPeriod;
 }
 
+float VideoFrameScheduler::getFrameRate() {
+    nsecs_t videoPeriod = mPll.getPeriod();
+    if (videoPeriod > 0) {
+        return 1e9 / videoPeriod;
+    }
+    return 0.f;
+}
+
 nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
     nsecs_t origRenderTime = renderTime;
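
For reference, the heart of the new onDrainVideo_l() logic is plain arithmetic: snap the frame's real-time PTS to a vsync via VideoFrameScheduler::schedule(), queue it if it is due within two display refreshes, and otherwise re-post the drain message so it wakes up two vsyncs early. The following standalone sketch walks through that rule with assumed numbers (60 Hz display, frame due 40 ms out); it is illustrative only and not code from the patch:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed values for illustration only.
        const int64_t oneVsyncUs  = 16667;           // ~60 Hz refresh period
        const int64_t twoVsyncsUs = 2 * oneVsyncUs;  // ~33.3 ms
        const int64_t nowUs       = 0;
        const int64_t itemRealUs  = 40000;           // vsync-adjusted PTS, 40 ms out

        if (itemRealUs <= nowUs + twoVsyncsUs) {
            // Due within two refreshes: queue it now and schedule the
            // dropped-frame check one vsync after the frame's target time.
            std::printf("render now, check for drops in %lld us\n",
                        (long long)(nowUs + oneVsyncUs - itemRealUs));
        } else {
            // Not due yet: re-post the drain message so it fires two vsyncs
            // before the frame is due (6666 us in this example).
            std::printf("repost drain in %lld us\n",
                        (long long)(itemRealUs - nowUs - twoVsyncsUs));
        }
        return 0;
    }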
