-rw-r--r--  include/media/stagefright/MediaSync.h                                                                         |  17
-rw-r--r--  include/media/stagefright/VideoFrameScheduler.h (renamed from media/libmediaplayerservice/VideoFrameScheduler.h)  |   6
-rw-r--r--  media/libmedia/mediaplayer.cpp                                                                                |  23
-rw-r--r--  media/libmediaplayerservice/Android.mk                                                                        |   1
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.cpp                                                            |  16
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.h                                                              |   3
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp                                                             |  28
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp                                                     |   3
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.cpp                                                           |  54
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.h                                                             |   3
-rw-r--r--  media/libstagefright/Android.mk                                                                               |   1
-rw-r--r--  media/libstagefright/MediaSync.cpp                                                                            | 158
-rw-r--r--  media/libstagefright/VideoFrameScheduler.cpp (renamed from media/libmediaplayerservice/VideoFrameScheduler.cpp)   |  20
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.cpp                                                              |  12
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.h                                                                |   2
-rw-r--r--  media/libstagefright/rtsp/MyHandler.h                                                                         |  28
-rwxr-xr-x  services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk                                |   7
17 files changed, 309 insertions, 73 deletions
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
index 1eef211..4b5cd05 100644
--- a/include/media/stagefright/MediaSync.h
+++ b/include/media/stagefright/MediaSync.h
@@ -37,6 +37,7 @@ class GraphicBuffer;
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;
+struct VideoFrameScheduler;
// MediaSync manages media playback and its synchronization to a media clock
// source. It can be also used for video-only playback.
@@ -103,6 +104,9 @@ public:
// MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
sp<const MediaClock> getMediaClock();
+ // Flush mediasync
+ void flush();
+
// Set the video frame rate hint - this is used by the video FrameScheduler
status_t setVideoFrameRateHint(float rate);
@@ -131,11 +135,10 @@ protected:
private:
enum {
- kWhatDrainVideo = 'dVid',
+ kWhatDrainVideo = 'dVid',
+ kWhatCheckFrameAvailable = 'cFrA',
};
- static const int MAX_OUTSTANDING_BUFFERS = 2;
-
// This is a thin wrapper class that lets us listen to
// IConsumerListener::onFrameAvailable from mInput.
class InputListener : public BnConsumerListener,
@@ -194,6 +197,8 @@ private:
sp<IGraphicBufferConsumer> mInput;
sp<IGraphicBufferProducer> mOutput;
int mUsageFlagsFromOutput;
+ uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
+ bool mReturnPendingInputFrame; // set while we are pending before acquiring an input frame
sp<AudioTrack> mAudioTrack;
uint32_t mNativeSampleRateInHz;
@@ -202,6 +207,7 @@ private:
int64_t mNextBufferItemMediaUs;
List<BufferItem> mBufferItems;
+ sp<VideoFrameScheduler> mFrameScheduler;
// Keep track of buffers received from |mInput|. This is needed because
// it's possible the consumer of |mOutput| could return a different
@@ -242,8 +248,9 @@ private:
// onBufferReleasedByOutput releases a buffer back to the input.
void onFrameAvailableFromInput();
- // Send |bufferItem| to the output for rendering.
- void renderOneBufferItem_l(const BufferItem &bufferItem);
+ // Send |bufferItem| to the output for rendering. If this is not the only
+ // buffer sent for rendering, check for any dropped frames in |checkInUs| us.
+ void renderOneBufferItem_l(const BufferItem &bufferItem, int64_t checkInUs);
// This implements the onBufferReleased callback from IProducerListener.
// It gets called from an OutputListener.
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/include/media/stagefright/VideoFrameScheduler.h
index b1765c9..9d97dfd 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.h
+++ b/include/media/stagefright/VideoFrameScheduler.h
@@ -39,6 +39,9 @@ struct VideoFrameScheduler : public RefBase {
// returns the vsync period for the main display
nsecs_t getVsyncPeriod();
+ // returns the current frames-per-second, or 0.f if not primed
+ float getFrameRate();
+
void release();
static const size_t kHistorySize = 8;
@@ -54,8 +57,9 @@ private:
void reset(float fps = -1);
// keep current estimate, but restart phase
void restart();
- // returns period
+ // returns period or 0 if not yet primed
nsecs_t addSample(nsecs_t time);
+ nsecs_t getPeriod() const;
private:
nsecs_t mPeriod;
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 81a5e8c..c215abf 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -260,9 +260,10 @@ status_t MediaPlayer::setVideoSurfaceTexture(
status_t MediaPlayer::prepareAsync_l()
{
if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {
- mPlayer->setAudioStreamType(mStreamType);
if (mAudioAttributesParcel != NULL) {
mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
+ } else {
+ mPlayer->setAudioStreamType(mStreamType);
}
mCurrentState = MEDIA_PLAYER_PREPARING;
return mPlayer->prepareAsync();
@@ -734,24 +735,28 @@ status_t MediaPlayer::checkStateForKeySet_l(int key)
status_t MediaPlayer::setParameter(int key, const Parcel& request)
{
ALOGV("MediaPlayer::setParameter(%d)", key);
+ status_t status = INVALID_OPERATION;
Mutex::Autolock _l(mLock);
if (checkStateForKeySet_l(key) != OK) {
- return INVALID_OPERATION;
- }
- if (mPlayer != NULL) {
- return mPlayer->setParameter(key, request);
+ return status;
}
switch (key) {
case KEY_PARAMETER_AUDIO_ATTRIBUTES:
- // no player, save the marshalled audio attributes
+ // save the marshalled audio attributes
if (mAudioAttributesParcel != NULL) { delete mAudioAttributesParcel; };
mAudioAttributesParcel = new Parcel();
mAudioAttributesParcel->appendFrom(&request, 0, request.dataSize());
- return OK;
+ status = OK;
+ break;
default:
- ALOGV("setParameter: no active player");
- return INVALID_OPERATION;
+ ALOGV_IF(mPlayer == NULL, "setParameter: no active player");
+ break;
+ }
+
+ if (mPlayer != NULL) {
+ status = mPlayer->setParameter(key, request);
}
+ return status;
}
status_t MediaPlayer::getParameter(int key, Parcel *reply)
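
The reordered setParameter() logic above amounts to: always cache the marshalled audio attributes, then forward the call when a player exists, and let prepareAsync_l() replay the cached attributes for players created later (falling back to the plain stream type only when no attributes were set). A minimal standalone sketch of that cache-then-forward pattern, with illustrative names rather than the real MediaPlayer API:

#include <iostream>
#include <memory>
#include <string>

struct Backend {
    void setParameter(const std::string& value) {
        std::cout << "backend received: " << value << "\n";
    }
};

class Player {
public:
    // Cache the value first; forward it if a backend already exists.
    void setAudioAttributes(const std::string& value) {
        mCached = value;
        if (mBackend) mBackend->setParameter(value);
    }
    // prepare() replays the cached value to a backend created later,
    // mirroring how prepareAsync_l() now prefers the saved attributes parcel.
    void prepare() {
        mBackend = std::make_unique<Backend>();
        if (!mCached.empty()) mBackend->setParameter(mCached);
    }
private:
    std::string mCached;
    std::unique_ptr<Backend> mBackend;
};

int main() {
    Player p;
    p.setAudioAttributes("usage=media");  // no backend yet: cached only
    p.prepare();                          // cached value replayed to the new backend
    p.setAudioAttributes("usage=alarm");  // backend exists: cached and forwarded
}
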
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 7f0cca2..4d1b587 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -21,7 +21,6 @@ LOCAL_SRC_FILES:= \
StagefrightPlayer.cpp \
StagefrightRecorder.cpp \
TestPlayerStub.cpp \
- VideoFrameScheduler.cpp \
LOCAL_SHARED_LIBRARIES := \
libbinder \
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 0ecfb1e..abbbc20 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -45,6 +45,7 @@
#include <utils/Timers.h>
#include <utils/Vector.h>
+#include <media/AudioPolicyHelper.h>
#include <media/IMediaHTTPService.h>
#include <media/IRemoteDisplay.h>
#include <media/IRemoteDisplayClient.h>
@@ -1351,6 +1352,10 @@ MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid, int pid,
mFlags(AUDIO_OUTPUT_FLAG_NONE)
{
ALOGV("AudioOutput(%d)", sessionId);
+ if (attr != NULL) {
+ mStreamType = audio_attributes_to_stream_type(attr);
+ }
+
setMinBufferCount();
}
@@ -1464,6 +1469,17 @@ String8 MediaPlayerService::AudioOutput::getParameters(const String8& keys)
void MediaPlayerService::AudioOutput::setAudioAttributes(const audio_attributes_t * attributes) {
Mutex::Autolock lock(mLock);
mAttributes = attributes;
+ if (attributes != NULL) {
+ mStreamType = audio_attributes_to_stream_type(attributes);
+ }
+}
+
+void MediaPlayerService::AudioOutput::setAudioStreamType(audio_stream_type_t streamType)
+{
+ // do not allow direct stream type modification if attributes have been set
+ if (mAttributes == NULL) {
+ mStreamType = streamType;
+ }
}
void MediaPlayerService::AudioOutput::deleteRecycledTrack_l()
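
The AudioOutput changes give audio attributes precedence over the legacy stream type: whenever attributes are supplied (constructor or setAudioAttributes()), the stream type is re-derived from them via audio_attributes_to_stream_type() from AudioPolicyHelper.h, and a later setAudioStreamType() no longer overrides that. A self-contained sketch of the precedence rule; the enum values and mapping below are placeholders, not the real audio_attributes_t / audio_stream_type_t types:

#include <cassert>
#include <optional>

enum class Usage { Media, Alarm };
enum class StreamType { Music, Alarm, Default };

// Illustrative mapping; the real helper is audio_attributes_to_stream_type().
static StreamType streamTypeForUsage(Usage u) {
    return u == Usage::Alarm ? StreamType::Alarm : StreamType::Music;
}

class Output {
public:
    void setAttributes(Usage u) {
        mAttributes = u;
        mStreamType = streamTypeForUsage(u);   // attributes always win
    }
    void setStreamType(StreamType t) {
        if (!mAttributes) {                    // only honored when no attributes set
            mStreamType = t;
        }
    }
    StreamType streamType() const { return mStreamType; }
private:
    std::optional<Usage> mAttributes;
    StreamType mStreamType = StreamType::Default;
};

int main() {
    Output out;
    out.setAttributes(Usage::Alarm);
    out.setStreamType(StreamType::Music);      // ignored: attributes already set
    assert(out.streamType() == StreamType::Alarm);
}
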
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 9e6ca52..1c32597 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -107,8 +107,7 @@ class MediaPlayerService : public BnMediaPlayerService
virtual void flush();
virtual void pause();
virtual void close();
- void setAudioStreamType(audio_stream_type_t streamType) {
- mStreamType = streamType; }
+ void setAudioStreamType(audio_stream_type_t streamType);
virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
void setAudioAttributes(const audio_attributes_t * attributes);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 3e0ee08..2fdc196 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1071,6 +1071,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->findInt32("audio", &audio));
ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
+ if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED
+ || mFlushingAudio == SHUT_DOWN)) {
+ // Flush has been handled by tear down.
+ break;
+ }
handleFlushComplete(audio, false /* isDecoder */);
finishFlushIfPossible();
} else if (what == Renderer::kWhatVideoRenderingStart) {
@@ -1079,14 +1084,27 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("media rendering started");
notifyListener(MEDIA_STARTED, 0, 0);
} else if (what == Renderer::kWhatAudioTearDown) {
- int64_t positionUs;
- CHECK(msg->findInt64("positionUs", &positionUs));
int32_t reason;
CHECK(msg->findInt32("reason", &reason));
ALOGV("Tear down audio with reason %d.", reason);
- closeAudioSink();
mAudioDecoder.clear();
++mAudioDecoderGeneration;
+ bool needsToCreateAudioDecoder = true;
+ if (mFlushingAudio == FLUSHING_DECODER) {
+ mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+ mFlushingAudio = FLUSHED;
+ finishFlushIfPossible();
+ } else if (mFlushingAudio == FLUSHING_DECODER_SHUTDOWN
+ || mFlushingAudio == SHUTTING_DOWN_DECODER) {
+ mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+ mFlushingAudio = SHUT_DOWN;
+ finishFlushIfPossible();
+ needsToCreateAudioDecoder = false;
+ }
+ if (mRenderer == NULL) {
+ break;
+ }
+ closeAudioSink();
mRenderer->flush(
true /* audio */, false /* notifyComplete */);
if (mVideoDecoder != NULL) {
@@ -1094,9 +1112,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
false /* audio */, false /* notifyComplete */);
}
+ int64_t positionUs;
+ CHECK(msg->findInt64("positionUs", &positionUs));
performSeek(positionUs);
- if (reason == Renderer::kDueToError) {
+ if (reason == Renderer::kDueToError && needsToCreateAudioDecoder) {
instantiateDecoder(true /* audio */, &mAudioDecoder);
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 13a7d94..767417b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -29,8 +29,7 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
-
-#include <VideoFrameScheduler.h>
+#include <media/stagefright/VideoFrameScheduler.h>
#include <inttypes.h>
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 5210fc8..58ff113 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -138,7 +138,9 @@ void NuPlayer::RTSPSource::pause() {
}
void NuPlayer::RTSPSource::resume() {
- mHandler->resume();
+ if (mHandler != NULL) {
+ mHandler->resume();
+ }
}
status_t NuPlayer::RTSPSource::feedMoreTSData() {
@@ -295,13 +297,19 @@ status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
sp<AMessage> msg = new AMessage(kWhatPerformSeek, this);
msg->setInt32("generation", ++mSeekGeneration);
msg->setInt64("timeUs", seekTimeUs);
- msg->post(200000ll);
- return OK;
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
}
void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) {
if (mState != CONNECTED) {
+ finishSeek(INVALID_OPERATION);
return;
}
@@ -320,9 +328,11 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
} else if (msg->what() == kWhatPerformSeek) {
int32_t generation;
CHECK(msg->findInt32("generation", &generation));
+ CHECK(msg->senderAwaitsResponse(&mSeekReplyID));
if (generation != mSeekGeneration) {
// obsolete.
+ finishSeek(OK);
return;
}
@@ -368,6 +378,37 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
case MyHandler::kWhatSeekDone:
{
mState = CONNECTED;
+ if (mSeekReplyID != NULL) {
+ // Unblock seekTo here in case we attempted to seek in a live stream
+ finishSeek(OK);
+ }
+ break;
+ }
+
+ case MyHandler::kWhatSeekPaused:
+ {
+ sp<AnotherPacketSource> source = getSource(true /* audio */);
+ if (source != NULL) {
+ source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+ /* extra */ NULL,
+ /* discard */ true);
+ }
+ source = getSource(false /* video */);
+ if (source != NULL) {
+ source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+ /* extra */ NULL,
+ /* discard */ true);
+ };
+
+ status_t err = OK;
+ msg->findInt32("err", &err);
+ finishSeek(err);
+
+ if (err == OK) {
+ int64_t timeUs;
+ CHECK(msg->findInt64("time", &timeUs));
+ mHandler->continueSeekAfterPause(timeUs);
+ }
break;
}
@@ -700,5 +741,12 @@ bool NuPlayer::RTSPSource::stopBufferingIfNecessary() {
return true;
}
+void NuPlayer::RTSPSource::finishSeek(status_t err) {
+ CHECK(mSeekReplyID != NULL);
+ sp<AMessage> seekReply = new AMessage;
+ seekReply->setInt32("err", err);
+ seekReply->postReply(mSeekReplyID);
+ mSeekReplyID = NULL;
+}
} // namespace android
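
With this change, RTSPSource::seekTo() blocks the caller via postAndAwaitResponse() instead of posting a delayed message and returning OK unconditionally, and every exit path (wrong state, obsolete seek generation, seek done, pause completion) funnels through finishSeek() so the waiting reply token is always answered exactly once. As a rough analogy only, not the real AMessage/AReplyToken API, the same request/reply shape can be sketched with a promise/future pair:

#include <future>
#include <iostream>
#include <thread>

enum Status { OK = 0, INVALID_OPERATION = -1 };  // illustrative values only

// The "handler" side: whatever happens, the promise must be fulfilled exactly
// once, which is what finishSeek() guarantees in this patch.
void performSeek(bool connected, std::promise<Status> reply) {
    if (!connected) {
        reply.set_value(INVALID_OPERATION);   // wrong state: fail the waiting caller
        return;
    }
    // ... issue the seek and wait for completion ...
    reply.set_value(OK);
}

// The "caller" side: post the request, then block until the reply arrives.
Status seekTo(bool connected) {
    std::promise<Status> reply;
    std::future<Status> result = reply.get_future();
    std::thread handler(performSeek, connected, std::move(reply));
    Status status = result.get();             // blocks like postAndAwaitResponse()
    handler.join();
    return status;
}

int main() {
    std::cout << "seek while connected: " << seekTo(true) << "\n";
    std::cout << "seek while disconnected: " << seekTo(false) << "\n";
}
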
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 5f2cf33..6438a1e 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -116,6 +116,8 @@ private:
int64_t mEOSTimeoutAudio;
int64_t mEOSTimeoutVideo;
+ sp<AReplyToken> mSeekReplyID;
+
sp<AnotherPacketSource> getSource(bool audio);
void onConnected();
@@ -131,6 +133,7 @@ private:
void setError(status_t err);
void startBufferingIfNecessary();
bool stopBufferingIfNecessary();
+ void finishSeek(status_t err);
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 69128bd..b86c749 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -64,6 +64,7 @@ LOCAL_SRC_FILES:= \
TimedEventQueue.cpp \
Utils.cpp \
VBRISeeker.cpp \
+ VideoFrameScheduler.cpp \
WAVExtractor.cpp \
WVMExtractor.cpp \
XINGSeeker.cpp \
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 52077a7..0df3ec9 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -25,6 +25,7 @@
#include <media/AudioTrack.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/VideoFrameScheduler.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -50,6 +51,8 @@ MediaSync::MediaSync()
mReleaseCondition(),
mNumOutstandingBuffers(0),
mUsageFlagsFromOutput(0),
+ mMaxAcquiredBufferCount(1),
+ mReturnPendingInputFrame(false),
mNativeSampleRateInHz(0),
mNumFramesWritten(0),
mHasAudio(false),
@@ -121,6 +124,11 @@ status_t MediaSync::setSurface(const sp<IGraphicBufferProducer> &output) {
ALOGE("setSurface: failed to connect (%d)", status);
return status;
}
+
+ if (mFrameScheduler == NULL) {
+ mFrameScheduler = new VideoFrameScheduler();
+ mFrameScheduler->init();
+ }
}
if (mOutput != NULL) {
@@ -209,6 +217,12 @@ status_t MediaSync::createInputSurface(
bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput);
*outBufferProducer = bufferProducer;
mInput = bufferConsumer;
+
+ // set undequeued buffer count
+ int minUndequeuedBuffers;
+ mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
+ mMaxAcquiredBufferCount = minUndequeuedBuffers;
+ bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
}
return status;
}
@@ -232,6 +246,7 @@ void MediaSync::updatePlaybackRate_l(float rate) {
mNextBufferItemMediaUs = -1;
}
mPlaybackRate = rate;
+ // TODO: update frame scheduler with this info
mMediaClock->setPlaybackRate(rate);
onDrainVideo_l();
}
@@ -325,13 +340,44 @@ void MediaSync::setName(const AString &name) {
mInput->setConsumerName(String8(name.c_str()));
}
+void MediaSync::flush() {
+ Mutex::Autolock lock(mMutex);
+ if (mFrameScheduler != NULL) {
+ mFrameScheduler->restart();
+ }
+ while (!mBufferItems.empty()) {
+ BufferItem *bufferItem = &*mBufferItems.begin();
+ returnBufferToInput_l(bufferItem->mGraphicBuffer, bufferItem->mFence);
+ mBufferItems.erase(mBufferItems.begin());
+ }
+ mNextBufferItemMediaUs = -1;
+ mNumFramesWritten = 0;
+ mReturnPendingInputFrame = true;
+ mReleaseCondition.signal();
+ mMediaClock->clearAnchor();
+}
+
status_t MediaSync::setVideoFrameRateHint(float rate) {
- // ignored until we add the FrameScheduler
- return rate >= 0.f ? OK : BAD_VALUE;
+ Mutex::Autolock lock(mMutex);
+ if (rate < 0.f) {
+ return BAD_VALUE;
+ }
+ if (mFrameScheduler != NULL) {
+ mFrameScheduler->init(rate);
+ }
+ return OK;
}
float MediaSync::getVideoFrameRate() {
- // we don't know the frame rate
+ Mutex::Autolock lock(mMutex);
+ if (mFrameScheduler != NULL) {
+ float fps = mFrameScheduler->getFrameRate();
+ if (fps > 0.f) {
+ return fps;
+ }
+ }
+
+ // we don't have or know the frame rate
return -1.f;
}
@@ -470,7 +516,7 @@ int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) {
CHECK_EQ(res, (status_t)OK);
numFramesPlayedAt = nowUs;
numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
- //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
}
//can't be negative until 12.4 hrs, test.
@@ -510,18 +556,30 @@ void MediaSync::onDrainVideo_l() {
int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);
- if (itemRealUs <= nowUs) {
+ // adjust video frame PTS based on vsync
+ itemRealUs = mFrameScheduler->schedule(itemRealUs * 1000) / 1000;
+ int64_t oneVsyncUs = (mFrameScheduler->getVsyncPeriod() / 1000);
+ int64_t twoVsyncsUs = oneVsyncUs * 2;
+
+ // post 2 display refreshes before rendering is due
+ if (itemRealUs <= nowUs + twoVsyncsUs) {
+ ALOGV("adjusting PTS from %lld to %lld",
+ (long long)bufferItem->mTimestamp / 1000, (long long)itemRealUs);
+ bufferItem->mTimestamp = itemRealUs * 1000;
+ bufferItem->mIsAutoTimestamp = false;
+
if (mHasAudio) {
if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
- renderOneBufferItem_l(*bufferItem);
+ renderOneBufferItem_l(*bufferItem, nowUs + oneVsyncUs - itemRealUs);
} else {
// too late.
returnBufferToInput_l(
bufferItem->mGraphicBuffer, bufferItem->mFence);
+ mFrameScheduler->restart();
}
} else {
// always render video buffer in video-only mode.
- renderOneBufferItem_l(*bufferItem);
+ renderOneBufferItem_l(*bufferItem, nowUs + oneVsyncUs - itemRealUs);
// smooth out videos >= 10fps
mMediaClock->updateAnchor(
@@ -534,7 +592,7 @@ void MediaSync::onDrainVideo_l() {
if (mNextBufferItemMediaUs == -1
|| mNextBufferItemMediaUs > itemMediaUs) {
sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
- msg->post(itemRealUs - nowUs);
+ msg->post(itemRealUs - nowUs - twoVsyncsUs);
mNextBufferItemMediaUs = itemMediaUs;
}
break;
@@ -545,10 +603,18 @@ void MediaSync::onDrainVideo_l() {
void MediaSync::onFrameAvailableFromInput() {
Mutex::Autolock lock(mMutex);
+ const static nsecs_t kAcquireWaitTimeout = 2000000000; // 2 seconds
+
+ mReturnPendingInputFrame = false;
+
// If there are too many outstanding buffers, wait until a buffer is
// released back to the input in onBufferReleased.
- while (mNumOutstandingBuffers >= MAX_OUTSTANDING_BUFFERS) {
- mReleaseCondition.wait(mMutex);
+ // NOTE: BufferQueue allows dequeuing maxAcquiredBufferCount + 1 buffers
+ while (mNumOutstandingBuffers > mMaxAcquiredBufferCount
+ && !mIsAbandoned && !mReturnPendingInputFrame) {
+ if (mReleaseCondition.waitRelative(mMutex, kAcquireWaitTimeout) != OK) {
+ ALOGI("still waiting to release a buffer before acquire");
+ }
// If the sync is abandoned while we are waiting, the release
// condition variable will be broadcast, and we should just return
@@ -582,12 +648,21 @@ void MediaSync::onFrameAvailableFromInput() {
if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
// Something is wrong since this buffer should be at our hands, bail.
+ ALOGE("received buffer multiple times from input");
mInput->consumerDisconnect();
onAbandoned_l(true /* isInput */);
return;
}
mBuffersFromInput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);
+ // If flush happened while waiting for a buffer to be released, simply return it
+ // TRICKY: do it here after it is detached so that we don't have to cache mGraphicBuffer.
+ if (mReturnPendingInputFrame) {
+ mReturnPendingInputFrame = false;
+ returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
+ return;
+ }
+
mBufferItems.push_back(bufferItem);
if (mBufferItems.size() == 1) {
@@ -595,7 +670,7 @@ void MediaSync::onFrameAvailableFromInput() {
}
}
-void MediaSync::renderOneBufferItem_l( const BufferItem &bufferItem) {
+void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem, int64_t checkInUs) {
IGraphicBufferProducer::QueueBufferInput queueInput(
bufferItem.mTimestamp,
bufferItem.mIsAutoTimestamp,
@@ -635,6 +710,12 @@ void MediaSync::renderOneBufferItem_l( const BufferItem &bufferItem) {
mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);
ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
+
+ // If we have already queued more than one buffer, check for any free buffers in case
+ // one of them were dropped - as BQ does not signal onBufferReleased in that case.
+ if (mBuffersSentToOutput.size() > 1) {
+ (new AMessage(kWhatCheckFrameAvailable, this))->post(checkInUs);
+ }
}
void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
@@ -646,32 +727,38 @@ void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
sp<GraphicBuffer> buffer;
sp<Fence> fence;
- status_t status = mOutput->detachNextBuffer(&buffer, &fence);
- ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
+ status_t status;
+ // NOTE: This is a workaround for a BufferQueue bug where onBufferReleased is
+ // called only for released buffers, but not for buffers that were dropped during
+ // acquire. Dropped buffers can still be detached as they are on the free list.
+ // TODO: remove if released callback happens also for dropped buffers
+ while ((status = mOutput->detachNextBuffer(&buffer, &fence)) != NO_MEMORY) {
+ ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
- if (status == NO_INIT) {
- // If the output has been abandoned, we can't do anything else,
- // since buffer is invalid.
- onAbandoned_l(false /* isInput */);
- return;
- }
+ if (status == NO_INIT) {
+ // If the output has been abandoned, we can't do anything else,
+ // since buffer is invalid.
+ onAbandoned_l(false /* isInput */);
+ return;
+ }
- ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
+ ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
- // If we've been abandoned, we can't return the buffer to the input, so just
- // move on.
- if (mIsAbandoned) {
- return;
- }
+ // If we've been abandoned, we can't return the buffer to the input, so just
+ // move on.
+ if (mIsAbandoned) {
+ return;
+ }
- ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
- if (ix < 0) {
- // The buffer is unknown, maybe leftover, ignore.
- return;
- }
- mBuffersSentToOutput.removeItemsAt(ix);
+ ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
+ if (ix < 0) {
+ // The buffer is unknown, maybe leftover, ignore.
+ return;
+ }
+ mBuffersSentToOutput.removeItemsAt(ix);
- returnBufferToInput_l(buffer, fence);
+ returnBufferToInput_l(buffer, fence);
+ }
}
void MediaSync::returnBufferToInput_l(
@@ -679,6 +766,7 @@ void MediaSync::returnBufferToInput_l(
ssize_t ix = mBuffersFromInput.indexOfKey(buffer->getId());
if (ix < 0) {
// The buffer is unknown, something is wrong, bail.
+ ALOGE("output returned unknown buffer");
mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
onAbandoned_l(false /* isInput */);
return;
@@ -741,6 +829,12 @@ void MediaSync::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatCheckFrameAvailable:
+ {
+ onBufferReleasedByOutput(mOutput);
+ break;
+ }
+
default:
TRESPASS();
break;
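
The reworked drain path snaps each frame's target time to a vsync boundary through the new VideoFrameScheduler and queues the buffer to the output once it is within two display refreshes of being due; otherwise it reposts kWhatDrainVideo so it wakes up two vsyncs before the frame's presentation time. The timing rule in isolation, with hypothetical helper names and all values in microseconds, looks roughly like this:

#include <cstdint>
#include <iostream>

// Decision for a single pending frame (all values in microseconds).
struct Decision {
    bool queueNow;       // send the buffer to the output now
    int64_t wakeUpInUs;  // otherwise, repost the drain message after this delay
};

Decision scheduleFrame(int64_t nowUs, int64_t frameRealUs, int64_t vsyncUs) {
    const int64_t twoVsyncsUs = 2 * vsyncUs;
    if (frameRealUs <= nowUs + twoVsyncsUs) {
        // Close enough: queue it now so the display picks it up on time.
        return {true, 0};
    }
    // Too early: come back two vsyncs before the frame is due.
    return {false, frameRealUs - nowUs - twoVsyncsUs};
}

int main() {
    const int64_t vsyncUs = 16667;  // ~60 Hz panel
    Decision nearFrame = scheduleFrame(/*nowUs=*/0, /*frameRealUs=*/20000, vsyncUs);
    Decision farFrame  = scheduleFrame(/*nowUs=*/0, /*frameRealUs=*/100000, vsyncUs);
    std::cout << nearFrame.queueNow << " " << farFrame.queueNow
              << " " << farFrame.wakeUpInUs << "\n";
}
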
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
index ce5f5fe..5fe9bf9 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -28,8 +28,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
-
-#include "VideoFrameScheduler.h"
+#include <media/stagefright/VideoFrameScheduler.h>
namespace android {
@@ -56,7 +55,7 @@ static const size_t kMinSamplesToEstimatePeriod = 3;
static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;
static const size_t kPrecision = 12;
-static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
+static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
static const int64_t kMultiplesThresholdDiv = 4; // 25%
static const int64_t kReFitThresholdDiv = 100; // 1%
static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s; // 1 sec
@@ -258,7 +257,8 @@ void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
mPhase = firstTime;
}
}
- ALOGV("priming[%zu] phase:%lld period:%lld", numSamplesToUse, mPhase, mPeriod);
+ ALOGV("priming[%zu] phase:%lld period:%lld",
+ numSamplesToUse, (long long)mPhase, (long long)mPeriod);
}
nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
@@ -316,6 +316,10 @@ nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
return mPeriod;
}
+nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
+ return mPrimed ? mPeriod : 0;
+}
+
/* ======================================================================= */
/* Frame Scheduler */
/* ======================================================================= */
@@ -382,6 +386,14 @@ nsecs_t VideoFrameScheduler::getVsyncPeriod() {
return kDefaultVsyncPeriod;
}
+float VideoFrameScheduler::getFrameRate() {
+ nsecs_t videoPeriod = mPll.getPeriod();
+ if (videoPeriod > 0) {
+ return 1e9 / videoPeriod;
+ }
+ return 0.f;
+}
+
nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
nsecs_t origRenderTime = renderTime;
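
getFrameRate() simply inverts the PLL's estimated frame period, reporting 0 until the PLL has primed. The conversion is a one-liner; for example:

#include <cstdint>
#include <cstdio>

// Nanosecond frame period -> frames per second; 0 means "not primed yet".
float frameRateFromPeriod(int64_t periodNs) {
    return periodNs > 0 ? static_cast<float>(1e9 / periodNs) : 0.f;
}

int main() {
    std::printf("%.3f fps\n", frameRateFromPeriod(16666667));  // ~60.000
    std::printf("%.3f fps\n", frameRateFromPeriod(41708333));  // ~23.976
    std::printf("%.3f fps\n", frameRateFromPeriod(0));         // not primed
}
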
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index 5c05a0e..1db350f 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -82,7 +82,10 @@ SoftHEVC::SoftHEVC(
initPorts(
kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
- CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+status_t SoftHEVC::init() {
+ return initDecoder();
}
SoftHEVC::~SoftHEVC() {
@@ -766,5 +769,10 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
android::SoftOMXComponent *createSoftOMXComponent(const char *name,
const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
OMX_COMPONENTTYPE **component) {
- return new android::SoftHEVC(name, callbacks, appData, component);
+ android::SoftHEVC *codec = new android::SoftHEVC(name, callbacks, appData, component);
+ if (codec->init() != android::OK) {
+ android::sp<android::SoftOMXComponent> release = codec;
+ return NULL;
+ }
+ return codec;
}
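
The SoftHEVC change is a standard two-phase construction fix: the constructor no longer aborts via CHECK_EQ when initDecoder() fails; instead the factory calls init() and hands back NULL on failure, releasing the half-built component. The same pattern in a self-contained form, with generic names rather than the OMX plumbing:

#include <iostream>
#include <memory>

enum Status { OK = 0, UNKNOWN_ERROR = -1 };

class Decoder {
public:
    Decoder() = default;                     // constructor does only infallible work
    Status init() {                          // fallible setup moved out of the constructor
        bool resourcesAvailable = true;      // stand-in for the real codec library init
        return resourcesAvailable ? OK : UNKNOWN_ERROR;
    }
};

// Mirrors createSoftOMXComponent(): construct, init, and return null on failure
// so the caller never sees a half-initialized object (and never hits a CHECK abort).
std::unique_ptr<Decoder> createDecoder() {
    auto codec = std::make_unique<Decoder>();
    if (codec->init() != OK) {
        return nullptr;                      // smart pointer releases the partial object
    }
    return codec;
}

int main() {
    std::cout << (createDecoder() ? "decoder ready" : "init failed") << "\n";
}
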
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
index a91f528..c6344cf 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -56,6 +56,8 @@ struct SoftHEVC: public SoftVideoDecoderOMXComponent {
SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData, OMX_COMPONENTTYPE **component);
+ status_t init();
+
protected:
virtual ~SoftHEVC();
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index e64a7a1..0d0baf3 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -98,6 +98,7 @@ struct MyHandler : public AHandler {
enum {
kWhatConnected = 'conn',
kWhatDisconnected = 'disc',
+ kWhatSeekPaused = 'spau',
kWhatSeekDone = 'sdon',
kWhatAccessUnit = 'accU',
@@ -220,6 +221,12 @@ struct MyHandler : public AHandler {
msg->post();
}
+ void continueSeekAfterPause(int64_t timeUs) {
+ sp<AMessage> msg = new AMessage('see1', this);
+ msg->setInt64("time", timeUs);
+ msg->post();
+ }
+
bool isSeekable() const {
return mSeekable;
}
@@ -1180,7 +1187,7 @@ struct MyHandler : public AHandler {
mCheckPending = true;
++mCheckGeneration;
- sp<AMessage> reply = new AMessage('see1', this);
+ sp<AMessage> reply = new AMessage('see0', this);
reply->setInt64("time", timeUs);
if (mPausing) {
@@ -1203,9 +1210,26 @@ struct MyHandler : public AHandler {
break;
}
- case 'see1':
+ case 'see0':
{
// Session is paused now.
+ status_t err = OK;
+ msg->findInt32("result", &err);
+
+ int64_t timeUs;
+ CHECK(msg->findInt64("time", &timeUs));
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatSeekPaused);
+ notify->setInt32("err", err);
+ notify->setInt64("time", timeUs);
+ notify->post();
+ break;
+
+ }
+
+ case 'see1':
+ {
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
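
The RTSP handler now splits pause-then-seek into two messages: 'see0' runs when the PAUSE response arrives and only posts kWhatSeekPaused upward, while 'see1', now triggered by continueSeekAfterPause(), issues the actual ranged PLAY after the source has flushed its packet queues. A toy sketch of that staged handler, with the message loop and names invented for illustration:

#include <functional>
#include <iostream>
#include <queue>

// A minimal message loop to show the staging; the real code uses ALooper/AMessage.
std::queue<std::function<void()>> loop;

void onContinueSeek(int64_t timeUs) {
    std::cout << "stage 2 ('see1'): issue PLAY with Range starting at " << timeUs << "us\n";
}

void onPauseDone(int64_t timeUs) {
    std::cout << "stage 1 ('see0'): paused, notify listener (kWhatSeekPaused)\n";
    // In the patch the listener flushes its queues and then explicitly asks the
    // handler to continue via continueSeekAfterPause(); collapsed into one step here.
    loop.push([timeUs] { onContinueSeek(timeUs); });
}

int main() {
    loop.push([] { onPauseDone(5000000); });   // PAUSE response arrives
    while (!loop.empty()) {
        auto msg = std::move(loop.front());
        loop.pop();
        msg();
    }
}
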
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
index 46b2725..a523656 100755
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
@@ -26,13 +26,8 @@ LOCAL_C_INCLUDES := \
LOCAL_SHARED_LIBRARIES := \
libaudiopolicyengineconfigurable \
libparameter \
- libicuuc \
- liblog \
-
-LOCAL_STATIC_LIBRARIES := \
libxmlserializer \
- libpfw_utility \
- libxml2 \
+ liblog \
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE := libpolicy-subsystem