summaryrefslogtreecommitdiffstats
path: root/include/media/stagefright/MediaSync.h
blob: ef8cb23d7f19d72e5cdb968c6c29941617abeaac (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_SYNC_H
#define MEDIA_SYNC_H

#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>

#include <media/AudioResamplerPublic.h>
#include <media/AVSyncSettings.h>
#include <media/stagefright/foundation/AHandler.h>

#include <utils/Condition.h>
#include <utils/KeyedVector.h>
#include <utils/Mutex.h>

namespace android {

class AudioTrack;
class BufferItem;
class Fence;
class GraphicBuffer;
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;
struct VideoFrameScheduler;

// MediaSync manages media playback and its synchronization to a media clock
// source. It can also be used for video-only playback.
//
// For video playback, it requires an output surface and provides an input
// surface. It then controls the rendering of input buffers (buffer queued to
// the input surface) on the output surface to happen at the appropriate time.
//
// For audio playback, it requires an audio track and takes updates of
// information of rendered audio data to maintain media clock when audio track
// serves as media clock source. (TODO: move audio rendering from JAVA to
// native code).
//
// It can use the audio or video track as media clock source, as well as an
// external clock. (TODO: actually support external clock as media clock
// sources; use video track as media clock source for audio-and-video stream).
//
// In video-only mode, MediaSync will play back every video frame even if
// a video frame arrives late based on its timestamp and the last frame's.
//
// The client needs to configure surface (for output video rendering) and audio
// track (for querying information of audio rendering) for MediaSync.
//
// Then the client needs to obtain a surface from MediaSync and render video
// frames onto that surface. Internally, the MediaSync will receive those video
// frames and render them onto the output surface at the appropriate time.
//
// The client needs to call updateQueuedAudioData() immediately after it writes
// audio data to the audio track. Such information will be used to update media
// clock.
//
class MediaSync : public AHandler {
public:
    // Create an instance of MediaSync.
    static sp<MediaSync> create();

    // Called when MediaSync is used to render video. It should be called
    // before createInputSurface().
    status_t setSurface(const sp<IGraphicBufferProducer> &output);

    // Called when audio track is used as media clock source. It should be
    // called before updateQueuedAudioData().
    status_t setAudioTrack(const sp<AudioTrack> &audioTrack);

    // Create a surface for client to render video frames. This is the surface
    // on which the client should render video frames. Those video frames will
    // be internally directed to output surface for rendering at appropriate
    // time.
    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);

    // Update just-rendered audio data size and the presentation timestamp of
    // the first frame of that audio data. It should be called immediately
    // after the client writes audio data into the AudioTrack.
    // This function assumes a continuous audio stream.
    // TODO: support gap or backwards updates.
    status_t updateQueuedAudioData(
            size_t sizeInBytes, int64_t presentationTimeUs);

    // Set the consumer name of the input queue.
    void setName(const AString &name);

    // Get the media clock used by the MediaSync so that the client can obtain
    // corresponding media time or real time via
    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
    sp<const MediaClock> getMediaClock();

    // Flush mediasync
    void flush();

    // Set the video frame rate hint - this is used by the video FrameScheduler
    status_t setVideoFrameRateHint(float rate);

    // Get the video frame rate measurement from the FrameScheduler
    // returns -1 if there is no measurement
    float getVideoFrameRate();

    // Set the sync settings parameters.
    status_t setSyncSettings(const AVSyncSettings &syncSettings);

    // Gets the sync settings parameters.
    void getSyncSettings(AVSyncSettings *syncSettings /* nonnull */);

    // Sets the playback rate using playback settings.
    // This method can be called any time.
    status_t setPlaybackSettings(const AudioPlaybackRate &rate);

    // Gets the playback rate (playback settings parameters).
    void getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);

    // Get the play time for pending audio frames in audio sink.
    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);

protected:
    // From AHandler; receives messages (e.g. kWhatDrainVideo) posted to this
    // handler.
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    enum {
        // Message posted to drain pending video buffer items
        // (see onDrainVideo_l()).
        kWhatDrainVideo = 'dVid',
    };

    // This is a thin wrapper class that lets us listen to
    // IConsumerListener::onFrameAvailable from mInput.
    class InputListener : public BnConsumerListener,
                          public IBinder::DeathRecipient {
    public:
        InputListener(const sp<MediaSync> &sync);
        virtual ~InputListener();

        // From IConsumerListener
        virtual void onFrameAvailable(const BufferItem &item);

        // From IConsumerListener
        // We don't care about released buffers because we detach each buffer as
        // soon as we acquire it. See the comment for onBufferReleased below for
        // some clarifying notes about the name.
        virtual void onBuffersReleased() {}

        // From IConsumerListener
        // We don't care about sideband streams, since we won't relay them.
        virtual void onSidebandStreamChanged();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        // Strong reference back to the owning MediaSync.
        sp<MediaSync> mSync;
    };

    // This is a thin wrapper class that lets us listen to
    // IProducerListener::onBufferReleased from mOutput.
    class OutputListener : public BnProducerListener,
                           public IBinder::DeathRecipient {
    public:
        OutputListener(const sp<MediaSync> &sync, const sp<IGraphicBufferProducer> &output);
        virtual ~OutputListener();

        // From IProducerListener
        virtual void onBufferReleased();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        // Strong reference back to the owning MediaSync.
        sp<MediaSync> mSync;
        // The output producer this listener was created for.
        sp<IGraphicBufferProducer> mOutput;
    };

    // mIsAbandoned is set to true when the input or output dies.
    // Once the MediaSync has been abandoned by one side, it will disconnect
    // from the other side and not attempt to communicate with it further.
    bool mIsAbandoned;

    // Guards the state below. Methods with an "_l" suffix expect the caller
    // to hold this lock (see e.g. the comment on onAbandoned_l()).
    mutable Mutex mMutex;
    // NOTE(review): presumably signaled when a buffer is released back to the
    // input, waking a blocked onFrameAvailableFromInput() -- see the comments
    // on onFrameAvailableFromInput()/onBufferReleasedByOutput(); confirm
    // against MediaSync.cpp.
    Condition mReleaseCondition;
    // Number of buffers currently outstanding; onFrameAvailableFromInput()
    // can block when there are too many (see its comment below).
    size_t mNumOutstandingBuffers;
    sp<IGraphicBufferConsumer> mInput;  // consumer end of the input surface
    sp<IGraphicBufferProducer> mOutput; // producer end of the output surface
    int mUsageFlagsFromOutput;          // buffer usage flags taken from the output
    uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
    bool mReturnPendingInputFrame;    // set while we are pending before acquiring an input frame

    // Audio-related state; valid once an audio track has been configured
    // (see setAudioTrack() / updateQueuedAudioData()).
    sp<AudioTrack> mAudioTrack;
    uint32_t mNativeSampleRateInHz;
    int64_t mNumFramesWritten;
    bool mHasAudio;

    // Video queue state: buffer items received from the input, plus the
    // scheduler used for frame-rate hints and measurement
    // (see setVideoFrameRateHint() / getVideoFrameRate()).
    int64_t mNextBufferItemMediaUs;
    List<BufferItem> mBufferItems;
    sp<VideoFrameScheduler> mFrameScheduler;

    // Keep track of buffers received from |mInput|. This is needed because
    // it's possible the consumer of |mOutput| could return a different
    // GraphicBuffer::handle (e.g., due to passing buffers through IPC),
    // and that could cause problem if the producer of |mInput| only
    // supports pre-registered buffers.
    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersFromInput;

    // Keep track of buffers sent to |mOutput|. When a new output surface comes
    // in, those buffers will be returned to input and old output surface will
    // be disconnected immediately.
    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersSentToOutput;

    // Looper on which this handler's messages are delivered.
    sp<ALooper> mLooper;
    // Current playback rate; 0.0 means stopped/paused (see isPlaying() and
    // updatePlaybackRate_l()).
    float mPlaybackRate;

    AudioPlaybackRate mPlaybackSettings; // see set/getPlaybackSettings()
    AVSyncSettings mSyncSettings;        // see set/getSyncSettings()

    // Exposed (as const) to clients via getMediaClock().
    sp<MediaClock> mMediaClock;

    // Private: use the create() factory above instead.
    MediaSync();

    // Must be accessed through RefBase
    virtual ~MediaSync();

    // NOTE(review): summaries on the next three accessors are inferred from
    // their names -- confirm against MediaSync.cpp.
    // Map media time |mediaTimeUs| to the real (system) time at which it
    // should be presented, given the current time |nowUs|.
    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
    // Duration of |numFrames| audio frames at the track's native sample rate.
    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
    // Media duration of the audio that has been played out as of |nowUs|.
    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);

    // Drain pending video buffer items (handler for kWhatDrainVideo);
    // must be called with mMutex held.
    void onDrainVideo_l();

    // This implements the onFrameAvailable callback from IConsumerListener.
    // It gets called from an InputListener.
    // During this callback, we detach the buffer from the input, and queue
    // it for rendering on the output. This call can block if there are too
    // many outstanding buffers. If it blocks, it will resume when
    // onBufferReleasedByOutput releases a buffer back to the input.
    void onFrameAvailableFromInput();

    // Send |bufferItem| to the output for rendering.
    void renderOneBufferItem_l(const BufferItem &bufferItem);

    // This implements the onBufferReleased callback from IProducerListener.
    // It gets called from an OutputListener.
    // During this callback, we detach the buffer from the output, and release
    // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
    void onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output);

    // Return |buffer| back to the input.
    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);

    // When this is called, the MediaSync disconnects from (i.e., abandons) its
    // input or output, and signals any waiting onFrameAvailable calls to wake
    // up. This must be called with mMutex locked.
    void onAbandoned_l(bool isInput);

    // Set the playback in a desired speed.
    // This method can be called any time.
    // |rate| is the ratio between desired speed and the normal one, and should
    // be non-negative. The meaning of rate values:
    // 1.0 -- normal playback
    // 0.0 -- stop or pause
    // larger than 1.0 -- faster than normal speed
    // between 0.0 and 1.0 -- slower than normal speed
    void updatePlaybackRate_l(float rate);

    // apply new sync settings
    void resync_l();

    // apply playback settings only - without resyncing or updating playback rate
    status_t setPlaybackSettings_l(const AudioPlaybackRate &rate);

    // helper: playing means a non-zero playback rate.
    bool isPlaying() { return mPlaybackRate != 0.0; }

    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
};

} // namespace android

#endif