author     Wei Jia <wjia@google.com>    2015-03-24 16:29:21 +0000
committer  Android (Google) Code Review <android-gerrit@google.com>    2015-03-24 16:29:22 +0000
commit     6ce2ed7e9d003c4580e2813d657660ef18865df6 (patch)
tree       18b29c59043564dfd0ca9f27c219749e3da908f2 /include
parent     ec648447b7dcab701c436a54bd38a1abd8037be1 (diff)
parent     c8db9712d9abe9b0d74193ea7d7cff428e32e62c (diff)
Merge "MediaSync: support MediaSync in native code."
Diffstat (limited to 'include')
-rw-r--r--  include/media/stagefright/MediaClock.h    24
-rw-r--r--  include/media/stagefright/MediaSync.h     239
2 files changed, 252 insertions, 11 deletions
diff --git a/include/media/stagefright/MediaClock.h b/include/media/stagefright/MediaClock.h
index 660764f..e9c09a1 100644
--- a/include/media/stagefright/MediaClock.h
+++ b/include/media/stagefright/MediaClock.h
@@ -35,9 +35,9 @@ struct MediaClock : public RefBase {
// It's required to use timestamp of just rendered frame as
// anchor time in paused state.
void updateAnchor(
- int64_t anchorTimeMediaUs,
- int64_t anchorTimeRealUs,
- int64_t maxTimeMediaUs = INT64_MAX);
+ int64_t anchorTimeMediaUs,
+ int64_t anchorTimeRealUs,
+ int64_t maxTimeMediaUs = INT64_MAX);
void updateMaxTimeMedia(int64_t maxTimeMediaUs);
@@ -45,22 +45,24 @@ struct MediaClock : public RefBase {
// query media time corresponding to real time |realUs|, and save the
// result in |outMediaUs|.
- status_t getMediaTime(int64_t realUs,
- int64_t *outMediaUs,
- bool allowPastMaxTime = false);
+ status_t getMediaTime(
+ int64_t realUs,
+ int64_t *outMediaUs,
+ bool allowPastMaxTime = false) const;
// query real time corresponding to media time |targetMediaUs|.
// The result is saved in |outRealUs|.
- status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs);
+ status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) const;
protected:
virtual ~MediaClock();
private:
- status_t getMediaTime_l(int64_t realUs,
- int64_t *outMediaUs,
- bool allowPastMaxTime);
+ status_t getMediaTime_l(
+ int64_t realUs,
+ int64_t *outMediaUs,
+ bool allowPastMaxTime) const;
- Mutex mLock;
+ mutable Mutex mLock;
int64_t mAnchorTimeMediaUs;
int64_t mAnchorTimeRealUs;
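
The point of the MediaClock.h changes is const-correctness: the query methods become const, and mLock becomes mutable so they can still lock it. That is what allows the sp<const MediaClock> accessor in the MediaSync.h header introduced below to hand out a read-only clock. A minimal sketch (not part of the patch) of querying the clock through that const handle; |sync| is a hypothetical, already-configured MediaSync instance, and the 1,000,000 us timestamp is illustrative:

    // Obtain a read-only view of the clock from a (hypothetical) MediaSync.
    sp<const MediaClock> clock = sync->getMediaClock();

    // Map "now" in real time to the media timestamp currently presented.
    int64_t nowUs = ALooper::GetNowUs();
    int64_t mediaUs = -1;
    if (clock->getMediaTime(nowUs, &mediaUs) == OK) {
        // mediaUs holds the current media position.
    }

    // Ask when the frame stamped at 1,000,000 us should appear in real time.
    int64_t renderUs = -1;
    if (clock->getRealTimeFor(1000000ll, &renderUs) == OK) {
        // Schedule that frame for renderUs.
    }

Because mLock is mutable, both queries can take the lock even through the const pointer, so clients get consistent reads without gaining mutation rights.
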
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
new file mode 100644
index 0000000..8bb8c7f
--- /dev/null
+++ b/include/media/stagefright/MediaSync.h
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_SYNC_H
+#define MEDIA_SYNC_H
+
+#include <gui/IConsumerListener.h>
+#include <gui/IProducerListener.h>
+
+#include <media/stagefright/foundation/AHandler.h>
+
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+class AudioTrack;
+class BufferItem;
+class Fence;
+class GraphicBuffer;
+class IGraphicBufferConsumer;
+class IGraphicBufferProducer;
+struct MediaClock;
+
+// MediaSync manages media playback and its synchronization to a media clock
+// source. It can also be used for video-only playback.
+//
+// For video playback, it requires an output surface and provides an input
+// surface. It then controls the rendering of input buffers (buffers queued
+// to the input surface) on the output surface at the appropriate time.
+//
+// For audio playback, it requires an audio track and takes updates about
+// rendered audio data to maintain the media clock when the audio track
+// serves as the media clock source. (TODO: move audio rendering from Java
+// to native code).
+//
+// It can use the audio or video track as the media clock source, as well as
+// an external clock. (TODO: actually support an external clock as the media
+// clock source; use the video track for audio-and-video streams).
+//
+// In video-only mode, MediaSync will play back every video frame even if
+// a frame arrives late relative to its timestamp and the last frame's.
+//
+// The client needs to configure a surface (for output video rendering) and an
+// audio track (for querying information about audio rendering) for MediaSync.
+//
+// Then the client needs to obtain a surface from MediaSync and render video
+// frames onto that surface. Internally, the MediaSync will receive those video
+// frames and render them onto the output surface at the appropriate time.
+//
+// The client needs to call updateQueuedAudioData() immediately after it writes
+// audio data to the audio track. Such information will be used to update the
+// media clock.
+//
+class MediaSync : public AHandler {
+public:
+ // Create an instance of MediaSync.
+ static sp<MediaSync> create();
+
+ // Called when MediaSync is used to render video. It should be called
+ // before createInputSurface().
+ status_t configureSurface(const sp<IGraphicBufferProducer> &output);
+
+ // Called when audio track is used as media clock source. It should be
+ // called before updateQueuedAudioData().
+ // |nativeSampleRateInHz| is the sample rate of the audio data fed into the
+ // audio track. It is the same rate used to create the AudioTrack.
+ status_t configureAudioTrack(
+ const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz);
+
+ // Create a surface for the client to render video frames onto. This is the
+ // surface to which the client should queue video frames. Those frames will
+ // be internally directed to the output surface for rendering at the
+ // appropriate time.
+ status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);
+
+ // Update just-rendered audio data size and the presentation timestamp of
+ // the first frame of that audio data. It should be called immediately
+ // after the client writes audio data into the AudioTrack.
+ // This function assumes a continuous audio stream.
+ // TODO: support gap or backwards updates.
+ status_t updateQueuedAudioData(
+ size_t sizeInBytes, int64_t presentationTimeUs);
+
+ // Set the consumer name of the input queue.
+ void setName(const AString &name);
+
+ // Set the playback speed.
+ // This method can be called any time.
+ // |rate| is the ratio between desired speed and the normal one, and should
+ // be non-negative. The meaning of rate values:
+ // 1.0 -- normal playback
+ // 0.0 -- stop or pause
+ // larger than 1.0 -- faster than normal speed
+ // between 0.0 and 1.0 -- slower than normal speed
+ status_t setPlaybackRate(float rate);
+
+ // Get the media clock used by the MediaSync so that the client can obtain
+ // corresponding media time or real time via
+ // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
+ sp<const MediaClock> getMediaClock();
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum {
+ kWhatDrainVideo = 'dVid',
+ };
+
+ static const int MAX_OUTSTANDING_BUFFERS = 2;
+
+ // This is a thin wrapper class that lets us listen to
+ // IConsumerListener::onFrameAvailable from mInput.
+ class InputListener : public BnConsumerListener,
+ public IBinder::DeathRecipient {
+ public:
+ InputListener(const sp<MediaSync> &sync);
+ virtual ~InputListener();
+
+ // From IConsumerListener
+ virtual void onFrameAvailable(const BufferItem &item);
+
+ // From IConsumerListener
+ // We don't care about released buffers because we detach each buffer as
+ // soon as we acquire it. See the comment for onBufferReleased below for
+ // some clarifying notes about the name.
+ virtual void onBuffersReleased() {}
+
+ // From IConsumerListener
+ // We don't care about sideband streams, since we won't relay them.
+ virtual void onSidebandStreamChanged();
+
+ // From IBinder::DeathRecipient
+ virtual void binderDied(const wp<IBinder> &who);
+
+ private:
+ sp<MediaSync> mSync;
+ };
+
+ // This is a thin wrapper class that lets us listen to
+ // IProducerListener::onBufferReleased from mOutput.
+ class OutputListener : public BnProducerListener,
+ public IBinder::DeathRecipient {
+ public:
+ OutputListener(const sp<MediaSync> &sync);
+ virtual ~OutputListener();
+
+ // From IProducerListener
+ virtual void onBufferReleased();
+
+ // From IBinder::DeathRecipient
+ virtual void binderDied(const wp<IBinder> &who);
+
+ private:
+ sp<MediaSync> mSync;
+ };
+
+ // mIsAbandoned is set to true when the input or output dies.
+ // Once the MediaSync has been abandoned by one side, it will disconnect
+ // from the other side and not attempt to communicate with it further.
+ bool mIsAbandoned;
+
+ mutable Mutex mMutex;
+ Condition mReleaseCondition;
+ size_t mNumOutstandingBuffers;
+ sp<IGraphicBufferConsumer> mInput;
+ sp<IGraphicBufferProducer> mOutput;
+
+ sp<AudioTrack> mAudioTrack;
+ uint32_t mNativeSampleRateInHz;
+ int64_t mNumFramesWritten;
+ bool mHasAudio;
+
+ int64_t mNextBufferItemMediaUs;
+ List<BufferItem> mBufferItems;
+ sp<ALooper> mLooper;
+ float mPlaybackRate;
+
+ sp<MediaClock> mMediaClock;
+
+ MediaSync();
+
+ // Must be accessed through RefBase
+ virtual ~MediaSync();
+
+ int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
+ int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
+ int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);
+
+ void onDrainVideo_l();
+
+ // This implements the onFrameAvailable callback from IConsumerListener.
+ // It gets called from an InputListener.
+ // During this callback, we detach the buffer from the input, and queue
+ // it for rendering on the output. This call can block if there are too
+ // many outstanding buffers. If it blocks, it will resume when
+ // onBufferReleasedByOutput releases a buffer back to the input.
+ void onFrameAvailableFromInput();
+
+ // Send |bufferItem| to the output for rendering.
+ void renderOneBufferItem_l(const BufferItem &bufferItem);
+
+ // This implements the onBufferReleased callback from IProducerListener.
+ // It gets called from an OutputListener.
+ // During this callback, we detach the buffer from the output, and release
+ // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
+ void onBufferReleasedByOutput();
+
+ // Return |buffer| back to the input.
+ void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);
+
+ // When this is called, the MediaSync disconnects from (i.e., abandons) its
+ // input or output, and signals any waiting onFrameAvailable calls to wake
+ // up. This must be called with mMutex locked.
+ void onAbandoned_l(bool isInput);
+
+ // Helper: playback is active while the rate is non-zero.
+ bool isPlaying() { return mPlaybackRate != 0.0; }
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
+};
+
+} // namespace android
+
+#endif
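
Read top to bottom, the class comment prescribes an order: configure the output surface and the audio track, create the input surface, then keep the clock fed from the audio path. A hedged sketch (not part of the patch) of that wiring against the API declared above; |outputSurface|, |audioTrack|, |sampleRate|, |bytesWritten|, and |ptsUs| are assumed to come from the client's existing setup, and error handling is abbreviated:

    // Illustrative only: wiring a MediaSync per the class comment's order.
    sp<MediaSync> sync = MediaSync::create();

    // 1. Output surface; must be configured before createInputSurface().
    status_t err = sync->configureSurface(outputSurface);

    // 2. Audio track as the media clock source; must precede
    //    updateQueuedAudioData(). |sampleRate| matches the AudioTrack.
    if (err == OK) {
        err = sync->configureAudioTrack(audioTrack, sampleRate);
    }

    // 3. Input surface: the client queues decoded frames here, and MediaSync
    //    releases them to the output surface at the right time.
    sp<IGraphicBufferProducer> inputSurface;
    if (err == OK) {
        err = sync->createInputSurface(&inputSurface);
    }

    // 4. Start playing at normal speed (0.0 would pause).
    if (err == OK) {
        err = sync->setPlaybackRate(1.0f);
    }

    // 5. Immediately after each AudioTrack::write(), report what was queued
    //    so the media clock tracks the audio position.
    if (err == OK) {
        err = sync->updateQueuedAudioData(bytesWritten, ptsUs);
    }
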