summary refs log tree commit diff stats
path: root/media/java/android/media/MediaSync.java
diff options
context:
space:
mode:
Diffstat (limited to 'media/java/android/media/MediaSync.java')
-rw-r--r--  media/java/android/media/MediaSync.java  208
1 files changed, 60 insertions, 148 deletions
diff --git a/media/java/android/media/MediaSync.java b/media/java/android/media/MediaSync.java
index a5b0d39..b07931d 100644
--- a/media/java/android/media/MediaSync.java
+++ b/media/java/android/media/MediaSync.java
@@ -20,7 +20,7 @@ import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.media.AudioTrack;
-import android.media.PlaybackSettings;
+import android.media.PlaybackParams;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
@@ -49,7 +49,7 @@ import java.util.List;
* sync.setAudioTrack(audioTrack);
* sync.setCallback(new MediaSync.Callback() {
* {@literal @Override}
- * public void onAudioBufferConsumed(MediaSync sync, ByteBuffer audioBuffer, int bufferIndex) {
+ * public void onAudioBufferConsumed(MediaSync sync, ByteBuffer audioBuffer, int bufferId) {
* ...
* }
* }, null);
@@ -62,8 +62,8 @@ import java.util.List;
* // videoDecoder.releaseOutputBuffer(videoOutputBufferIx, videoPresentationTimeNs);
* // More details are available as below.
* ...
- * sync.queueAudio(audioByteBuffer, bufferIndex, size, audioPresentationTimeUs); // non-blocking.
- * // The audioByteBuffer and bufferIndex will be returned via callback.
+ * sync.queueAudio(audioByteBuffer, bufferId, audioPresentationTimeUs); // non-blocking.
+ * // The audioByteBuffer and bufferId will be returned via callback.
* // More details are available as below.
* ...
* ...
@@ -75,22 +75,22 @@ import java.util.List;
* // The following code snippet illustrates how video/audio raw frames are created by
* // MediaCodec's, how they are fed to MediaSync and how they are returned by MediaSync.
* // This is the callback from MediaCodec.
- * onOutputBufferAvailable(MediaCodec codec, int bufferIndex, BufferInfo info) {
+ * onOutputBufferAvailable(MediaCodec codec, int bufferId, BufferInfo info) {
* // ...
* if (codec == videoDecoder) {
* // surface timestamp must contain media presentation time in nanoseconds.
- * codec.releaseOutputBuffer(bufferIndex, 1000 * info.presentationTime);
+ * codec.releaseOutputBuffer(bufferId, 1000 * info.presentationTime);
* } else {
- * ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferIndex);
- * sync.queueByteBuffer(audioByteBuffer, bufferIndex, info.size, info.presentationTime);
+ * ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferId);
+ * sync.queueByteBuffer(audioByteBuffer, bufferId, info.size, info.presentationTime);
* }
* // ...
* }
*
* // This is the callback from MediaSync.
- * onAudioBufferConsumed(MediaSync sync, ByteBuffer buffer, int bufferIndex) {
+ * onAudioBufferConsumed(MediaSync sync, ByteBuffer buffer, int bufferId) {
* // ...
- * audioDecoder.releaseBuffer(bufferIndex, false);
+ * audioDecoder.releaseBuffer(bufferId, false);
* // ...
* }
*
@@ -112,7 +112,7 @@ import java.util.List;
* initial underrun.
* <p>
*/
-final public class MediaSync {
+public final class MediaSync {
/**
* MediaSync callback interface. Used to notify the user asynchronously
* of various MediaSync events.
@@ -123,10 +123,11 @@ final public class MediaSync {
*
* @param sync The MediaSync object.
* @param audioBuffer The returned audio buffer.
- * @param bufferIndex The index associated with the audio buffer
+ * @param bufferId The ID associated with audioBuffer as passed into
+ * {@link MediaSync#queueAudio}.
*/
public abstract void onAudioBufferConsumed(
- @NonNull MediaSync sync, @NonNull ByteBuffer audioBuffer, int bufferIndex);
+ @NonNull MediaSync sync, @NonNull ByteBuffer audioBuffer, int bufferId);
}
/** Audio track failed.
@@ -170,14 +171,12 @@ final public class MediaSync {
private static class AudioBuffer {
public ByteBuffer mByteBuffer;
public int mBufferIndex;
- public int mSizeInBytes;
long mPresentationTimeUs;
- public AudioBuffer(@NonNull ByteBuffer byteBuffer, int bufferIndex,
- int sizeInBytes, long presentationTimeUs) {
+ public AudioBuffer(@NonNull ByteBuffer byteBuffer, int bufferId,
+ long presentationTimeUs) {
mByteBuffer = byteBuffer;
- mBufferIndex = bufferIndex;
- mSizeInBytes = sizeInBytes;
+ mBufferIndex = bufferId;
mPresentationTimeUs = presentationTimeUs;
}
}
@@ -353,98 +352,15 @@ final public class MediaSync {
public native final Surface createInputSurface();
/**
- * Resample audio data when changing playback speed.
+ * Sets playback rate using {@link PlaybackParams}.
* <p>
- * Resample the waveform based on the requested playback rate to get
- * a new waveform, and play back the new waveform at the original sampling
- * frequency.
- * <p><ul>
- * <li>When rate is larger than 1.0, pitch becomes higher.
- * <li>When rate is smaller than 1.0, pitch becomes lower.
- * </ul>
- */
- public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;
-
- /**
- * Time stretch audio when changing playback speed.
- * <p>
- * Time stretching changes the duration of the audio samples without
- * affecting their pitch. This is only supported for a limited range
- * of playback speeds, e.g. from 1/2x to 2x. If the rate is adjusted
- * beyond this limit, the rate change will fail.
- */
- public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;
-
- /**
- * Time stretch audio when changing playback speed, and may mute if
- * stretching is no longer supported.
- * <p>
- * Time stretching changes the duration of the audio samples without
- * affecting their pitch. This is only supported for a limited range
- * of playback speeds, e.g. from 1/2x to 2x. When it is no longer
- * supported, the audio may be muted. Using this mode will not fail
- * for non-negative playback rates.
- */
- public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;
-
- /** @hide */
- @IntDef(
- value = {
- PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
- PLAYBACK_RATE_AUDIO_MODE_STRETCH,
- PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
- })
- @Retention(RetentionPolicy.SOURCE)
- public @interface PlaybackRateAudioMode {}
-
- /**
- * Sets playback rate and audio mode.
- *
- * @param rate the ratio between desired playback rate and normal one. 1.0 means normal
- * playback speed. 0.0 means pause. Value larger than 1.0 means faster playback,
- * while value between 0.0 and 1.0 for slower playback. <b>Note:</b> the normal rate
- * does not change as a result of this call. To restore the original rate at any time,
- * use 1.0.
- * @param audioMode audio playback mode. Must be one of the supported
- * audio modes.
- *
- * @throws IllegalStateException if the internal sync engine or the audio track has not
- * been initialized.
- * @throws IllegalArgumentException if audioMode is not supported.
- */
- public void setPlaybackRate(float rate, @PlaybackRateAudioMode int audioMode) {
- PlaybackSettings rateSettings = new PlaybackSettings();
- rateSettings.allowDefaults();
- switch (audioMode) {
- case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
- rateSettings.setSpeed(rate).setPitch(1.0f);
- break;
- case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
- rateSettings.setSpeed(rate).setPitch(1.0f)
- .setAudioFallbackMode(rateSettings.AUDIO_FALLBACK_MODE_FAIL);
- break;
- case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
- rateSettings.setSpeed(rate).setPitch(rate);
- break;
- default:
- {
- final String msg = "Audio playback mode " + audioMode + " is not supported";
- throw new IllegalArgumentException(msg);
- }
- }
- setPlaybackSettings(rateSettings);
- }
-
- /**
- * Sets playback rate using {@link PlaybackSettings}.
- * <p>
- * When using MediaSync with {@link AudioTrack}, set playback settings using this
+ * When using MediaSync with {@link AudioTrack}, set playback params using this
* call instead of calling it directly on the track, so that the sync is aware of
- * the settings change.
+ * the params change.
* <p>
* This call also works if there is no audio track.
*
- * @param settings the playback settings to use. {@link PlaybackSettings#getSpeed
+ * @param params the playback params to use. {@link PlaybackParams#getSpeed
* Speed} is the ratio between desired playback rate and normal one. 1.0 means
* normal playback speed. 0.0 means pause. Value larger than 1.0 means faster playback,
* while value between 0.0 and 1.0 for slower playback. <b>Note:</b> the normal rate
@@ -453,11 +369,11 @@ final public class MediaSync {
*
* @throws IllegalStateException if the internal sync engine or the audio track has not
* been initialized.
- * @throws IllegalArgumentException if the settings are not supported.
+ * @throws IllegalArgumentException if the params are not supported.
*/
- public void setPlaybackSettings(@NonNull PlaybackSettings settings) {
+ public void setPlaybackParams(@NonNull PlaybackParams params) {
synchronized(mAudioLock) {
- mPlaybackRate = native_setPlaybackSettings(settings);;
+ mPlaybackRate = native_setPlaybackParams(params);;
}
if (mPlaybackRate != 0.0 && mAudioThread != null) {
postRenderAudio(0);
@@ -465,7 +381,7 @@ final public class MediaSync {
}
/**
- * Gets the playback rate using {@link PlaybackSettings}.
+ * Gets the playback rate using {@link PlaybackParams}.
*
* @return the playback rate being used.
*
@@ -473,40 +389,40 @@ final public class MediaSync {
* been initialized.
*/
@NonNull
- public native PlaybackSettings getPlaybackSettings();
+ public native PlaybackParams getPlaybackParams();
- private native float native_setPlaybackSettings(@NonNull PlaybackSettings settings);
+ private native float native_setPlaybackParams(@NonNull PlaybackParams params);
/**
* Sets A/V sync mode.
*
- * @param settings the A/V sync settings to apply
+ * @param params the A/V sync params to apply
*
* @throws IllegalStateException if the internal player engine has not been
* initialized.
- * @throws IllegalArgumentException if settings are not supported.
+ * @throws IllegalArgumentException if params are not supported.
*/
- public void setSyncSettings(@NonNull SyncSettings settings) {
+ public void setSyncParams(@NonNull SyncParams params) {
synchronized(mAudioLock) {
- mPlaybackRate = native_setSyncSettings(settings);;
+ mPlaybackRate = native_setSyncParams(params);;
}
if (mPlaybackRate != 0.0 && mAudioThread != null) {
postRenderAudio(0);
}
}
- private native float native_setSyncSettings(@NonNull SyncSettings settings);
+ private native float native_setSyncParams(@NonNull SyncParams params);
/**
* Gets the A/V sync mode.
*
- * @return the A/V sync settings
+ * @return the A/V sync params
*
* @throws IllegalStateException if the internal player engine has not been
* initialized.
*/
@NonNull
- public native SyncSettings getSyncSettings();
+ public native SyncParams getSyncParams();
/**
* Flushes all buffers from the sync object.
@@ -525,24 +441,23 @@ final public class MediaSync {
}
/**
- * Get current playback position.
- * <p>
- * The MediaTimestamp represents how the media time correlates to the system time in
- * a linear fashion. It contains the media time and system timestamp of an anchor frame
- * ({@link MediaTimestamp#mediaTimeUs} and {@link MediaTimestamp#nanoTime})
- * and the speed of the media clock ({@link MediaTimestamp#clockRate}).
- * <p>
- * During regular playback, the media time moves fairly constantly (though the
- * anchor frame may be rebased to a current system time, the linear correlation stays
- * steady). Therefore, this method does not need to be called often.
- * <p>
- * To help users to get current playback position, this method always returns the timestamp of
- * just-rendered frame, i.e., {@link System#nanoTime} and its corresponding media time. They
- * can be used as current playback position.
- *
- * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
- * is available, e.g. because the media sync has not been initialized.
- */
+ * Get current playback position.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion using an anchor and a clock rate. During regular playback, the media
+ * time moves fairly constantly (though the anchor frame may be rebased to a current
+ * system time, the linear correlation stays steady). Therefore, this method does not
+ * need to be called often.
+ * <p>
+ * To help users get current playback position, this method always anchors the timestamp
+ * to the current {@link System#nanoTime system time}, so
+ * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media player has not been initialized.
+ *
+ * @see MediaTimestamp
+ */
@Nullable
public MediaTimestamp getTimestamp()
{
@@ -565,25 +480,23 @@ final public class MediaSync {
* Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode).
* @param audioData the buffer that holds the data to play. This buffer will be returned
* to the client via registered callback.
- * @param bufferIndex the buffer index used to identify audioData. It will be returned to
- * the client along with audioData. This helps applications to keep track of audioData.
- * @param sizeInBytes number of bytes to queue.
+ * @param bufferId an integer used to identify audioData. It will be returned to
+ * the client along with audioData. This helps applications to keep track of audioData,
+ * e.g., it can be used to store the output buffer index used by the audio codec.
* @param presentationTimeUs the presentation timestamp in microseconds for the first frame
* in the buffer.
     * @throws IllegalStateException if audio track is not set or internal configuration
* has not been done correctly.
*/
public void queueAudio(
- @NonNull ByteBuffer audioData, int bufferIndex, int sizeInBytes,
- long presentationTimeUs) {
+ @NonNull ByteBuffer audioData, int bufferId, long presentationTimeUs) {
if (mAudioTrack == null || mAudioThread == null) {
throw new IllegalStateException(
"AudioTrack is NOT set or audio thread is not created");
}
synchronized(mAudioLock) {
- mAudioBuffers.add(new AudioBuffer(
- audioData, bufferIndex, sizeInBytes, presentationTimeUs));
+ mAudioBuffers.add(new AudioBuffer(audioData, bufferId, presentationTimeUs));
}
if (mPlaybackRate != 0.0) {
@@ -605,18 +518,19 @@ final public class MediaSync {
}
AudioBuffer audioBuffer = mAudioBuffers.get(0);
+ int size = audioBuffer.mByteBuffer.remaining();
int sizeWritten = mAudioTrack.write(
audioBuffer.mByteBuffer,
- audioBuffer.mSizeInBytes,
+ size,
AudioTrack.WRITE_NON_BLOCKING);
if (sizeWritten > 0) {
if (audioBuffer.mPresentationTimeUs != -1) {
native_updateQueuedAudioData(
- audioBuffer.mSizeInBytes, audioBuffer.mPresentationTimeUs);
+ size, audioBuffer.mPresentationTimeUs);
audioBuffer.mPresentationTimeUs = -1;
}
- if (sizeWritten == audioBuffer.mSizeInBytes) {
+ if (sizeWritten == size) {
postReturnByteBuffer(audioBuffer);
mAudioBuffers.remove(0);
if (!mAudioBuffers.isEmpty()) {
@@ -624,8 +538,6 @@ final public class MediaSync {
}
return;
}
-
- audioBuffer.mSizeInBytes -= sizeWritten;
}
long pendingTimeMs = TimeUnit.MICROSECONDS.toMillis(
native_getPlayTimeForPendingAudioFrames());