summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
Diffstat (limited to 'media')
-rw-r--r--media/java/android/media/AsyncPlayer.java42
-rw-r--r--media/java/android/media/AudioAttributes.java77
-rw-r--r--media/java/android/media/AudioDeviceCallback.java40
-rw-r--r--media/java/android/media/AudioDeviceInfo.java (renamed from media/java/android/media/AudioDevice.java)118
-rw-r--r--media/java/android/media/AudioDevicePort.java15
-rw-r--r--media/java/android/media/AudioFocusInfo.java5
-rw-r--r--media/java/android/media/AudioFormat.java354
-rw-r--r--media/java/android/media/AudioManager.java1019
-rw-r--r--media/java/android/media/AudioManagerInternal.java23
-rw-r--r--media/java/android/media/AudioMixPort.java22
-rw-r--r--media/java/android/media/AudioPort.java24
-rw-r--r--media/java/android/media/AudioPortEventHandler.java18
-rw-r--r--media/java/android/media/AudioRecord.java705
-rw-r--r--media/java/android/media/AudioRoutesInfo.java27
-rw-r--r--media/java/android/media/AudioService.java6006
-rw-r--r--media/java/android/media/AudioSystem.java163
-rw-r--r--media/java/android/media/AudioTrack.java1047
-rw-r--r--media/java/android/media/ClosedCaptionRenderer.java12
-rw-r--r--media/java/android/media/DataSource.java43
-rw-r--r--media/java/android/media/FocusRequester.java328
-rw-r--r--media/java/android/media/IAudioService.aidl42
-rw-r--r--media/java/android/media/IVolumeController.aidl2
-rw-r--r--media/java/android/media/Image.java160
-rw-r--r--media/java/android/media/ImageReader.java274
-rw-r--r--media/java/android/media/ImageUtils.java178
-rw-r--r--media/java/android/media/ImageWriter.java800
-rw-r--r--media/java/android/media/MediaCodec.java2071
-rw-r--r--media/java/android/media/MediaCodecInfo.java356
-rw-r--r--media/java/android/media/MediaCodecList.java11
-rw-r--r--media/java/android/media/MediaCrypto.java28
-rw-r--r--media/java/android/media/MediaCryptoException.java8
-rw-r--r--media/java/android/media/MediaDataSource.java61
-rw-r--r--media/java/android/media/MediaDescription.java43
-rw-r--r--media/java/android/media/MediaDrm.java577
-rw-r--r--media/java/android/media/MediaExtractor.java74
-rw-r--r--media/java/android/media/MediaFocusControl.java2197
-rw-r--r--media/java/android/media/MediaFormat.java79
-rw-r--r--media/java/android/media/MediaHTTPConnection.java6
-rw-r--r--media/java/android/media/MediaHTTPService.java2
-rw-r--r--media/java/android/media/MediaMetadata.java1
-rw-r--r--media/java/android/media/MediaMetadataRetriever.java21
-rw-r--r--media/java/android/media/MediaMuxer.java41
-rw-r--r--media/java/android/media/MediaPlayer.java641
-rw-r--r--media/java/android/media/MediaRecorder.java55
-rw-r--r--media/java/android/media/MediaRouter.java47
-rw-r--r--media/java/android/media/MediaScanner.java11
-rw-r--r--media/java/android/media/MediaSync.java617
-rw-r--r--media/java/android/media/MediaTimestamp.java85
-rw-r--r--media/java/android/media/PlaybackParams.aidl19
-rw-r--r--media/java/android/media/PlaybackParams.java250
-rw-r--r--media/java/android/media/PlayerRecord.java357
-rw-r--r--media/java/android/media/RemoteControlClient.java1
-rw-r--r--media/java/android/media/RemoteDisplay.java13
-rw-r--r--media/java/android/media/RingtoneManager.java15
-rw-r--r--media/java/android/media/SoundPool.java588
-rw-r--r--media/java/android/media/SubtitleController.java4
-rw-r--r--media/java/android/media/SubtitleTrack.java7
-rw-r--r--media/java/android/media/SyncParams.java288
-rw-r--r--media/java/android/media/TimedMetaData.java78
-rw-r--r--media/java/android/media/TtmlRenderer.java12
-rw-r--r--media/java/android/media/Utils.java2
-rw-r--r--media/java/android/media/VolumePolicy.aidl19
-rw-r--r--media/java/android/media/VolumePolicy.java102
-rw-r--r--media/java/android/media/audiofx/AcousticEchoCanceler.java3
-rw-r--r--media/java/android/media/audiofx/AudioEffect.java6
-rw-r--r--media/java/android/media/audiofx/AutomaticGainControl.java3
-rw-r--r--media/java/android/media/audiofx/NoiseSuppressor.java3
-rw-r--r--media/java/android/media/audiofx/Virtualizer.java46
-rw-r--r--media/java/android/media/audiofx/Visualizer.java8
-rw-r--r--media/java/android/media/audiopolicy/AudioMix.java67
-rw-r--r--media/java/android/media/audiopolicy/AudioPolicy.java24
-rw-r--r--media/java/android/media/audiopolicy/AudioPolicyConfig.java12
-rw-r--r--media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl3
-rw-r--r--media/java/android/media/browse/MediaBrowser.java92
-rw-r--r--media/java/android/media/midi/IMidiDeviceListener.aidl28
-rw-r--r--media/java/android/media/midi/IMidiDeviceServer.aidl33
-rw-r--r--media/java/android/media/midi/IMidiManager.aidl56
-rw-r--r--media/java/android/media/midi/MidiDevice.java195
-rw-r--r--media/java/android/media/midi/MidiDeviceInfo.aidl19
-rw-r--r--media/java/android/media/midi/MidiDeviceInfo.java355
-rw-r--r--media/java/android/media/midi/MidiDeviceServer.java362
-rw-r--r--media/java/android/media/midi/MidiDeviceService.java136
-rw-r--r--media/java/android/media/midi/MidiDeviceStatus.aidl19
-rw-r--r--media/java/android/media/midi/MidiDeviceStatus.java138
-rw-r--r--media/java/android/media/midi/MidiInputPort.java154
-rw-r--r--media/java/android/media/midi/MidiManager.java362
-rw-r--r--media/java/android/media/midi/MidiOutputPort.java155
-rw-r--r--media/java/android/media/midi/MidiPortImpl.java134
-rw-r--r--media/java/android/media/midi/MidiReceiver.java133
-rw-r--r--media/java/android/media/midi/MidiSender.java62
-rw-r--r--media/java/android/media/midi/package.html321
-rw-r--r--media/java/android/media/projection/MediaProjection.java1
-rw-r--r--media/java/android/media/projection/MediaProjectionManager.java1
-rw-r--r--media/java/android/media/session/ISessionCallback.aidl5
-rw-r--r--media/java/android/media/session/ISessionController.aidl5
-rw-r--r--media/java/android/media/session/MediaController.java27
-rw-r--r--media/java/android/media/session/MediaSession.java22
-rw-r--r--media/java/android/media/session/MediaSessionLegacyHelper.java19
-rw-r--r--media/java/android/media/session/MediaSessionManager.java9
-rw-r--r--media/java/android/media/session/PlaybackState.java19
-rw-r--r--media/java/android/media/tv/DvbDeviceInfo.aidl20
-rw-r--r--media/java/android/media/tv/DvbDeviceInfo.java97
-rw-r--r--media/java/android/media/tv/ITvInputClient.aidl3
-rw-r--r--media/java/android/media/tv/ITvInputManager.aidl15
-rw-r--r--media/java/android/media/tv/ITvInputSession.aidl9
-rw-r--r--media/java/android/media/tv/ITvInputSessionCallback.aidl3
-rw-r--r--media/java/android/media/tv/ITvInputSessionWrapper.java77
-rw-r--r--media/java/android/media/tv/TvContentRating.java129
-rw-r--r--media/java/android/media/tv/TvContract.java578
-rw-r--r--media/java/android/media/tv/TvInputInfo.java49
-rw-r--r--media/java/android/media/tv/TvInputManager.java575
-rw-r--r--media/java/android/media/tv/TvInputService.java514
-rw-r--r--media/java/android/media/tv/TvTrackInfo.java79
-rw-r--r--media/java/android/media/tv/TvView.java282
-rwxr-xr-xmedia/java/android/mtp/MtpDatabase.java1
-rw-r--r--media/java/android/mtp/MtpStorage.java4
-rw-r--r--media/java/android/service/media/IMediaBrowserService.aidl2
-rw-r--r--media/java/android/service/media/MediaBrowserService.java111
-rw-r--r--media/jni/Android.mk13
-rw-r--r--media/jni/android_media_ImageReader.cpp471
-rw-r--r--media/jni/android_media_ImageWriter.cpp1083
-rw-r--r--media/jni/android_media_MediaCodec.cpp248
-rw-r--r--media/jni/android_media_MediaCodec.h5
-rw-r--r--media/jni/android_media_MediaCodecList.cpp25
-rw-r--r--media/jni/android_media_MediaCrypto.cpp43
-rw-r--r--media/jni/android_media_MediaDataSource.cpp153
-rw-r--r--media/jni/android_media_MediaDataSource.h73
-rw-r--r--media/jni/android_media_MediaDrm.cpp78
-rw-r--r--media/jni/android_media_MediaExtractor.cpp79
-rw-r--r--media/jni/android_media_MediaHTTPConnection.cpp1
-rw-r--r--media/jni/android_media_MediaMetadataRetriever.cpp37
-rw-r--r--media/jni/android_media_MediaPlayer.cpp204
-rw-r--r--media/jni/android_media_MediaRecorder.cpp33
-rw-r--r--media/jni/android_media_MediaSync.cpp551
-rw-r--r--media/jni/android_media_MediaSync.h65
-rw-r--r--media/jni/android_media_PlaybackParams.h120
-rw-r--r--media/jni/android_media_SyncParams.cpp91
-rw-r--r--media/jni/android_media_SyncParams.h65
-rw-r--r--media/jni/audioeffect/android_media_AudioEffect.cpp10
-rw-r--r--media/jni/audioeffect/android_media_Visualizer.cpp11
-rw-r--r--media/jni/soundpool/Android.mk2
-rw-r--r--media/jni/soundpool/SoundPool.cpp112
-rw-r--r--media/jni/soundpool/SoundPool.h8
-rw-r--r--media/jni/soundpool/android_media_SoundPool.cpp (renamed from media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp)90
-rw-r--r--media/lib/remotedisplay/java/com/android/media/remotedisplay/RemoteDisplayProvider.java2
-rw-r--r--media/packages/BluetoothMidiService/Android.mk11
-rw-r--r--media/packages/BluetoothMidiService/AndroidManifest.xml17
-rw-r--r--media/packages/BluetoothMidiService/res/values/strings.xml19
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java278
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java61
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java115
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java220
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java109
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java33
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java41
-rw-r--r--media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java2
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java1
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CodecTest.java21
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java96
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java41
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java34
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java25
-rw-r--r--media/tests/contents/media_api/video/MPEG2_1500_AAC_128.mp4bin0 -> 2100454 bytes
163 files changed, 18817 insertions, 11896 deletions
diff --git a/media/java/android/media/AsyncPlayer.java b/media/java/android/media/AsyncPlayer.java
index 14b199e..dd5f6ba 100644
--- a/media/java/android/media/AsyncPlayer.java
+++ b/media/java/android/media/AsyncPlayer.java
@@ -16,6 +16,7 @@
package android.media;
+import android.annotation.NonNull;
import android.content.Context;
import android.net.Uri;
import android.os.PowerManager;
@@ -38,11 +39,11 @@ public class AsyncPlayer {
Context context;
Uri uri;
boolean looping;
- int stream;
+ AudioAttributes attributes;
long requestTime;
public String toString() {
- return "{ code=" + code + " looping=" + looping + " stream=" + stream
+ return "{ code=" + code + " looping=" + looping + " attr=" + attributes
+ " uri=" + uri + " }";
}
}
@@ -56,7 +57,7 @@ public class AsyncPlayer {
try {
if (mDebug) Log.d(mTag, "Starting playback");
MediaPlayer player = new MediaPlayer();
- player.setAudioStreamType(cmd.stream);
+ player.setAudioAttributes(cmd.attributes);
player.setDataSource(cmd.context, cmd.uri);
player.setLooping(cmd.looping);
player.prepare();
@@ -159,21 +160,52 @@ public class AsyncPlayer {
* (see {@link MediaPlayer#setLooping(boolean)})
* @param stream the AudioStream to use.
* (see {@link MediaPlayer#setAudioStreamType(int)})
+ * @deprecated use {@link #play(Context, Uri, boolean, AudioAttributes)} instead
*/
public void play(Context context, Uri uri, boolean looping, int stream) {
+ if (context == null || uri == null) {
+ return;
+ }
+ try {
+ play(context, uri, looping,
+ new AudioAttributes.Builder().setInternalLegacyStreamType(stream).build());
+ } catch (IllegalArgumentException e) {
+ Log.e(mTag, "Call to deprecated AsyncPlayer.play() method caused:", e);
+ }
+ }
+
+ /**
+ * Start playing the sound. It will actually start playing at some
+ * point in the future. There are no guarantees about latency here.
+ * Calling this before another audio file is done playing will stop
+ * that one and start the new one.
+ *
+ * @param context the non-null application's context.
+ * @param uri the non-null URI to play. (see {@link MediaPlayer#setDataSource(Context, Uri)})
+ * @param looping whether the audio should loop forever.
+ * (see {@link MediaPlayer#setLooping(boolean)})
+ * @param attributes the non-null {@link AudioAttributes} to use.
+ * (see {@link MediaPlayer#setAudioAttributes(AudioAttributes)})
+ * @throws IllegalArgumentException
+ */
+ public void play(@NonNull Context context, @NonNull Uri uri, boolean looping,
+ @NonNull AudioAttributes attributes) throws IllegalArgumentException {
+ if (context == null || uri == null || attributes == null) {
+ throw new IllegalArgumentException("Illegal null AsyncPlayer.play() argument");
+ }
Command cmd = new Command();
cmd.requestTime = SystemClock.uptimeMillis();
cmd.code = PLAY;
cmd.context = context;
cmd.uri = uri;
cmd.looping = looping;
- cmd.stream = stream;
+ cmd.attributes = attributes;
synchronized (mCmdQueue) {
enqueueLocked(cmd);
mState = PLAY;
}
}
-
+
/**
* Stop a previously played sound. It can't be played again or unpaused
* at this point. Calling this multiple times has no ill effects.
diff --git a/media/java/android/media/AudioAttributes.java b/media/java/android/media/AudioAttributes.java
index 489f552..0f1be6b 100644
--- a/media/java/android/media/AudioAttributes.java
+++ b/media/java/android/media/AudioAttributes.java
@@ -17,6 +17,7 @@
package android.media;
import android.annotation.IntDef;
+import android.annotation.NonNull;
import android.annotation.SystemApi;
import android.os.Parcel;
import android.os.Parcelable;
@@ -27,7 +28,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Collections;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.Objects;
import java.util.Set;
@@ -209,8 +209,23 @@ public final class AudioAttributes implements Parcelable {
@SystemApi
public final static int FLAG_HW_HOTWORD = 0x1 << 5;
+ /**
+ * @hide
+ * Flag requesting audible playback even under limited interruptions.
+ */
+ @SystemApi
+ public final static int FLAG_BYPASS_INTERRUPTION_POLICY = 0x1 << 6;
+
+ /**
+ * @hide
+ * Flag requesting audible playback even when the underlying stream is muted.
+ */
+ @SystemApi
+ public final static int FLAG_BYPASS_MUTE = 0x1 << 7;
+
private final static int FLAG_ALL = FLAG_AUDIBILITY_ENFORCED | FLAG_SECURE | FLAG_SCO |
- FLAG_BEACON | FLAG_HW_AV_SYNC | FLAG_HW_HOTWORD;
+ FLAG_BEACON | FLAG_HW_AV_SYNC | FLAG_HW_HOTWORD | FLAG_BYPASS_INTERRUPTION_POLICY |
+ FLAG_BYPASS_MUTE;
private final static int FLAG_ALL_PUBLIC = FLAG_AUDIBILITY_ENFORCED | FLAG_HW_AV_SYNC;
private int mUsage = USAGE_UNKNOWN;
@@ -265,6 +280,7 @@ public final class AudioAttributes implements Parcelable {
* Internal use only
* @return a combined mask of all flags
*/
+ @SystemApi
public int getAllFlags() {
return (mFlags & FLAG_ALL);
}
@@ -527,14 +543,15 @@ public final class AudioAttributes implements Parcelable {
/**
* @hide
* Same as {@link #setCapturePreset(int)} but authorizes the use of HOTWORD,
- * REMOTE_SUBMIX and FM_TUNER.
+ * REMOTE_SUBMIX and RADIO_TUNER.
* @param preset
* @return the same Builder instance.
*/
+ @SystemApi
public Builder setInternalCapturePreset(int preset) {
if ((preset == MediaRecorder.AudioSource.HOTWORD)
|| (preset == MediaRecorder.AudioSource.REMOTE_SUBMIX)
- || (preset == MediaRecorder.AudioSource.FM_TUNER)) {
+ || (preset == MediaRecorder.AudioSource.RADIO_TUNER)) {
mSource = preset;
} else {
setCapturePreset(preset);
@@ -708,15 +725,47 @@ public final class AudioAttributes implements Parcelable {
return USAGE_UNKNOWN;
}
}
+ /**
+ * @hide
+ * CANDIDATE FOR PUBLIC (or at least SYSTEM) API
+ * Returns the stream type matching the given attributes for volume control.
+ * Use this method to derive the stream type needed to configure the volume
+ * control slider in an {@link Activity} with {@link Activity#setVolumeControlStream(int)}.
+ * <BR>Do not use this method to set the stream type on an audio player object
+ * (e.g. {@link AudioTrack}, {@link MediaPlayer}), use <code>AudioAttributes</code> instead.
+ * @param aa non-null AudioAttributes.
+ * @return a valid stream type for <code>Activity</code> or stream volume control that matches
+ * the attributes, or {@link AudioManager#USE_DEFAULT_STREAM_TYPE} if there isn't a direct
+ * match. Note that <code>USE_DEFAULT_STREAM_TYPE</code> is not a valid value
+ * for {@link AudioManager#setStreamVolume(int, int, int)}.
+ */
+ public static int getVolumeControlStream(@NonNull AudioAttributes aa) {
+ if (aa == null) {
+ throw new IllegalArgumentException("Invalid null audio attributes");
+ }
+ return toVolumeStreamType(true /*fromGetVolumeControlStream*/, aa);
+ }
- /** @hide */
- public static int toLegacyStreamType(AudioAttributes aa) {
+ /**
+ * @hide
+ * Only use to get which stream type should be used for volume control, NOT for audio playback
+ * (all audio playback APIs are supposed to take AudioAttributes as input parameters)
+ * @param aa non-null AudioAttributes.
+ * @return a valid stream type for volume control that matches the attributes.
+ */
+ public static int toLegacyStreamType(@NonNull AudioAttributes aa) {
+ return toVolumeStreamType(false /*fromGetVolumeControlStream*/, aa);
+ }
+
+ private static int toVolumeStreamType(boolean fromGetVolumeControlStream, AudioAttributes aa) {
// flags to stream type mapping
if ((aa.getFlags() & FLAG_AUDIBILITY_ENFORCED) == FLAG_AUDIBILITY_ENFORCED) {
- return AudioSystem.STREAM_SYSTEM_ENFORCED;
+ return fromGetVolumeControlStream ?
+ AudioSystem.STREAM_SYSTEM : AudioSystem.STREAM_SYSTEM_ENFORCED;
}
if ((aa.getFlags() & FLAG_SCO) == FLAG_SCO) {
- return AudioSystem.STREAM_BLUETOOTH_SCO;
+ return fromGetVolumeControlStream ?
+ AudioSystem.STREAM_VOICE_CALL : AudioSystem.STREAM_BLUETOOTH_SCO;
}
// usage to stream type mapping
@@ -731,7 +780,8 @@ public final class AudioAttributes implements Parcelable {
case USAGE_VOICE_COMMUNICATION:
return AudioSystem.STREAM_VOICE_CALL;
case USAGE_VOICE_COMMUNICATION_SIGNALLING:
- return AudioSystem.STREAM_DTMF;
+ return fromGetVolumeControlStream ?
+ AudioSystem.STREAM_VOICE_CALL : AudioSystem.STREAM_DTMF;
case USAGE_ALARM:
return AudioSystem.STREAM_ALARM;
case USAGE_NOTIFICATION_RINGTONE:
@@ -743,8 +793,15 @@ public final class AudioAttributes implements Parcelable {
case USAGE_NOTIFICATION_EVENT:
return AudioSystem.STREAM_NOTIFICATION;
case USAGE_UNKNOWN:
+ return fromGetVolumeControlStream ?
+ AudioManager.USE_DEFAULT_STREAM_TYPE : AudioSystem.STREAM_MUSIC;
default:
- return AudioSystem.STREAM_MUSIC;
+ if (fromGetVolumeControlStream) {
+ throw new IllegalArgumentException("Unknown usage value " + aa.getUsage() +
+ " in audio attributes");
+ } else {
+ return AudioSystem.STREAM_MUSIC;
+ }
}
}
diff --git a/media/java/android/media/AudioDeviceCallback.java b/media/java/android/media/AudioDeviceCallback.java
new file mode 100644
index 0000000..d9f0037
--- /dev/null
+++ b/media/java/android/media/AudioDeviceCallback.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * AudioDeviceCallback defines the mechanism by which applications can receive notifications
+ * of audio device connection and disconnection events.
+ * @see AudioManager#registerAudioDeviceCallback.
+ */
+public abstract class AudioDeviceCallback {
+ /**
+ * Called by the {@link AudioManager} to indicate that one or more audio devices have been
+ * connected.
+ * @param addedDevices An array of {@link AudioDeviceInfo} objects corresponding to any
+ * newly added audio devices.
+ */
+ public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {}
+
+ /**
+ * Called by the {@link AudioManager} to indicate that one or more audio devices have been
+ * disconnected.
+ * @param removedDevices An array of {@link AudioDeviceInfo} objects corresponding to any
+ * newly removed audio devices.
+ */
+ public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {}
+}
diff --git a/media/java/android/media/AudioDevice.java b/media/java/android/media/AudioDeviceInfo.java
index df4d60d..431d37e 100644
--- a/media/java/android/media/AudioDevice.java
+++ b/media/java/android/media/AudioDeviceInfo.java
@@ -16,13 +16,13 @@
package android.media;
+import android.annotation.NonNull;
import android.util.SparseIntArray;
/**
* Class to provide information about the audio devices.
- * @hide
*/
-public class AudioDevice {
+public final class AudioDeviceInfo {
/**
* A device type associated with an unknown or uninitialized device.
@@ -42,7 +42,7 @@ public class AudioDevice {
*/
public static final int TYPE_WIRED_HEADSET = 3;
/**
- * A device type describing a pair of wired headphones .
+ * A device type describing a pair of wired headphones.
*/
public static final int TYPE_WIRED_HEADPHONES = 4;
/**
@@ -54,7 +54,7 @@ public class AudioDevice {
*/
public static final int TYPE_LINE_DIGITAL = 6;
/**
- * A device type describing a Bluetooth device typically used for telephony .
+ * A device type describing a Bluetooth device typically used for telephony.
*/
public static final int TYPE_BLUETOOTH_SCO = 7;
/**
@@ -106,46 +106,112 @@ public class AudioDevice {
*/
public static final int TYPE_AUX_LINE = 19;
- AudioDevicePortConfig mConfig;
+ private final AudioDevicePort mPort;
- AudioDevice(AudioDevicePortConfig config) {
- mConfig = new AudioDevicePortConfig(config);
+ AudioDeviceInfo(AudioDevicePort port) {
+ mPort = port;
}
/**
- * @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return The internal device ID.
*/
- public boolean isInputDevice() {
- return (mConfig.port().role() == AudioPort.ROLE_SOURCE);
+ public int getId() {
+ return mPort.handle().id();
}
/**
- * @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return The human-readable name of the audio device.
*/
- public boolean isOutputDevice() {
- return (mConfig.port().role() == AudioPort.ROLE_SINK);
+ public CharSequence getProductName() {
+ String portName = mPort.name();
+ return portName.length() != 0 ? portName : android.os.Build.MODEL;
}
/**
* @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return The "address" string of the device. This generally contains device-specific
+ * parameters.
+ */
+ public String getAddress() {
+ return mPort.address();
+ }
+
+ /**
+ * @return true if the audio device is a source for audio data (e.e an input).
*/
- public int getDeviceType() {
- return INT_TO_EXT_DEVICE_MAPPING.get(mConfig.port().type(), TYPE_UNKNOWN);
+ public boolean isSource() {
+ return mPort.role() == AudioPort.ROLE_SOURCE;
}
/**
- * @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return true if the audio device is a sink for audio data (i.e. an output).
*/
- public String getAddress() {
- return mConfig.port().address();
+ public boolean isSink() {
+ return mPort.role() == AudioPort.ROLE_SINK;
+ }
+
+ /**
+ * @return An array of sample rates supported by the audio device.
+ */
+ public @NonNull int[] getSampleRates() {
+ return mPort.samplingRates();
+ }
+
+ /**
+ * @return An array of channel position masks (e.g. {@link AudioFormat#CHANNEL_IN_STEREO},
+ * {@link AudioFormat#CHANNEL_OUT_7POINT1}) for which this audio device can be configured.
+ *
+ * @see AudioFormat
+ */
+ public @NonNull int[] getChannelMasks() {
+ return mPort.channelMasks();
+ }
+
+ /**
+ * @return An array of channel index masks for which this audio device can be configured.
+ *
+ * @see AudioFormat
+ */
+ public @NonNull int[] getChannelIndexMasks() {
+ // TODO: implement
+ return new int[0];
+ }
+
+ /**
+ * @return An array of channel counts (1, 2, 4, ...) for which this audio device
+ * can be configured.
+ */
+ public @NonNull int[] getChannelCounts() {
+ int[] masks = getChannelMasks();
+ int[] counts = new int[masks.length];
+ // TODO: consider channel index masks
+ for (int mask_index = 0; mask_index < masks.length; mask_index++) {
+ counts[mask_index] = isSink()
+ ? AudioFormat.channelCountFromOutChannelMask(masks[mask_index])
+ : AudioFormat.channelCountFromInChannelMask(masks[mask_index]);
+ }
+ return counts;
+ }
+
+ /**
+ * @return An array of audio encodings (e.g. {@link AudioFormat#ENCODING_PCM_16BIT},
+ * {@link AudioFormat#ENCODING_PCM_FLOAT}) supported by the audio device.
+ * <code>ENCODING_PCM_FLOAT</code> indicates the device supports more
+ * than 16 bits of integer precision. Specifying <code>ENCODING_PCM_FLOAT</code>
+ * with {@link AudioTrack} or {@link AudioRecord} can preserve at least 24 bits of
+ * integer precision to that device.
+ *
+ * @see AudioFormat
+ */
+ public @NonNull int[] getEncodings() {
+ return AudioFormat.filterPublicFormats(mPort.formats());
+ }
+
+ /**
+ * @return The device type identifier of the audio device (i.e. TYPE_BUILTIN_SPEAKER).
+ */
+ public int getType() {
+ return INT_TO_EXT_DEVICE_MAPPING.get(mPort.type(), TYPE_UNKNOWN);
}
/** @hide */
diff --git a/media/java/android/media/AudioDevicePort.java b/media/java/android/media/AudioDevicePort.java
index b10736b..c078260 100644
--- a/media/java/android/media/AudioDevicePort.java
+++ b/media/java/android/media/AudioDevicePort.java
@@ -36,12 +36,13 @@ public class AudioDevicePort extends AudioPort {
private final int mType;
private final String mAddress;
- AudioDevicePort(AudioHandle handle, int[] samplingRates, int[] channelMasks,
+ AudioDevicePort(AudioHandle handle, String deviceName,
+ int[] samplingRates, int[] channelMasks,
int[] formats, AudioGain[] gains, int type, String address) {
super(handle,
(AudioManager.isInputDevice(type) == true) ?
AudioPort.ROLE_SOURCE : AudioPort.ROLE_SINK,
- samplingRates, channelMasks, formats, gains);
+ deviceName, samplingRates, channelMasks, formats, gains);
mType = type;
mAddress = address;
}
@@ -82,6 +83,16 @@ public class AudioDevicePort extends AudioPort {
if (o == null || !(o instanceof AudioDevicePort)) {
return false;
}
+ AudioDevicePort other = (AudioDevicePort)o;
+ if (mType != other.type()) {
+ return false;
+ }
+ if (mAddress == null && other.address() != null) {
+ return false;
+ }
+ if (!mAddress.equals(other.address())) {
+ return false;
+ }
return super.equals(o);
}
diff --git a/media/java/android/media/AudioFocusInfo.java b/media/java/android/media/AudioFocusInfo.java
index fbdda3c..540c328 100644
--- a/media/java/android/media/AudioFocusInfo.java
+++ b/media/java/android/media/AudioFocusInfo.java
@@ -45,8 +45,9 @@ public final class AudioFocusInfo implements Parcelable {
* @param gainRequest
* @param lossReceived
* @param flags
+ * @hide
*/
- AudioFocusInfo(AudioAttributes aa, String clientId, String packageName,
+ public AudioFocusInfo(AudioAttributes aa, String clientId, String packageName,
int gainRequest, int lossReceived, int flags) {
mAttributes = aa == null ? new AudioAttributes.Builder().build() : aa;
mClientId = clientId == null ? "" : clientId;
@@ -91,7 +92,7 @@ public final class AudioFocusInfo implements Parcelable {
public int getLossReceived() { return mLossReceived; }
/** @hide */
- void clearLossReceived() { mLossReceived = 0; }
+ public void clearLossReceived() { mLossReceived = 0; }
/**
* The flags set in the audio focus request.
diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java
index 9a0266d..ee6d661 100644
--- a/media/java/android/media/AudioFormat.java
+++ b/media/java/android/media/AudioFormat.java
@@ -17,15 +17,116 @@
package android.media;
import android.annotation.IntDef;
-
+import android.annotation.NonNull;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
+import java.util.Arrays;
/**
- * The AudioFormat class is used to access a number of audio format and
+ * The <code>AudioFormat</code> class is used to access a number of audio format and
* channel configuration constants. They are for instance used
- * in {@link AudioTrack} and {@link AudioRecord}.
+ * in {@link AudioTrack} and {@link AudioRecord}, as valid values in individual parameters of
+ * constructors like {@link AudioTrack#AudioTrack(int, int, int, int, int, int)}, where the fourth
+ * parameter is one of the <code>AudioFormat.ENCODING_*</code> constants.
+ * <p>The {@link AudioFormat.Builder} class can be used to create instances of
+ * the <code>AudioFormat</code> format class.
+ * Refer to
+ * {@link AudioFormat.Builder} for documentation on the mechanics of the configuration and building
+ * of such instances. Here we describe the main concepts that the <code>AudioFormat</code> class
+ * allows you to convey in each instance; they are:
+ * <ol>
+ * <li><a href="#sampleRate">sample rate</a>
+ * <li><a href="#encoding">encoding</a>
+ * <li><a href="#channelMask">channel masks</a>
+ * </ol>
+ *
+ * <h4 id="sampleRate">Sample rate</h4>
+ * <p>Expressed in Hz, the sample rate in an <code>AudioFormat</code> instance expresses the number
+ * of audio samples for each channel per second in the content you are playing or recording. It is
+ * not the sample rate
+ * at which content is rendered or produced. For instance a sound at a media sample rate of 8000Hz
+ * can be played on a device operating at a sample rate of 48000Hz; the sample rate conversion is
+ * automatically handled by the platform, it will not play at 6x speed.
+ *
+ * <h4 id="encoding">Encoding</h4>
+ * <p>For PCM audio, audio encoding is used to describe the bit representation of an audio data
+ * sample; for example, the size as 8 bit, 16 bit, and the representation as integer or float.
+ * <br>For compressed formats, audio encoding is used to describe the compression scheme being used.
+ *
+ * <h4 id="channelMask">Channel mask</h4>
+ * <p>Channel masks are used in <code>AudioTrack</code> and <code>AudioRecord</code> to describe
+ * the samples and their arrangement in the audio frame. They are also used in the endpoint (e.g.
+ * a USB audio interface, a DAC connected to headphones) to specify allowable configurations of a
+ * particular device.
+ * <br>As of API {@link android.os.Build.VERSION_CODES#MNC}, there are two types of channel masks:
+ * channel position masks and channel index masks.
*
+ * <h5 id="channelPositionMask">Channel position masks</h5>
+ * Channel position masks are the original Android channel masks, and are used since API
+ * {@link android.os.Build.VERSION_CODES#BASE}.
+ * For input and output, they imply a positional nature - the location of a speaker or a microphone
+ * for recording or playback.
+ * <br>For a channel position mask, each allowed channel position corresponds to a bit in the
+ * channel mask. If that channel position is present in the audio frame, that bit is set,
+ * otherwise it is zero. The order of the bits (from lsb to msb) corresponds to the order of that
+ * position's sample in the audio frame.
+ * <br>The canonical channel position masks by channel count are as follows:
+ * <br><table>
+ * <tr><td>channel count</td><td>channel position mask</td></tr>
+ * <tr><td>1</td><td>{@link #CHANNEL_OUT_MONO}</td></tr>
+ * <tr><td>2</td><td>{@link #CHANNEL_OUT_STEREO}</td></tr>
+ * <tr><td>3</td><td>{@link #CHANNEL_OUT_STEREO} | {@link #CHANNEL_OUT_FRONT_CENTER}</td></tr>
+ * <tr><td>4</td><td>{@link #CHANNEL_OUT_QUAD}</td></tr>
+ * <tr><td>5</td><td>{@link #CHANNEL_OUT_QUAD} | {@link #CHANNEL_OUT_FRONT_CENTER}</td></tr>
+ * <tr><td>6</td><td>{@link #CHANNEL_OUT_5POINT1}</td></tr>
+ * <tr><td>7</td><td>{@link #CHANNEL_OUT_5POINT1} | {@link #CHANNEL_OUT_BACK_CENTER}</td></tr>
+ * <tr><td>8</td><td>{@link #CHANNEL_OUT_7POINT1_SURROUND}</td></tr>
+ * </table>
+ * <br>These masks are an ORed composite of individual channel masks. For example
+ * {@link #CHANNEL_OUT_STEREO} is composed of {@link #CHANNEL_OUT_FRONT_LEFT} and
+ * {@link #CHANNEL_OUT_FRONT_RIGHT}.
+ *
+ * <h5 id="channelIndexMask">Channel index masks</h5>
+ * Channel index masks are introduced in API {@link android.os.Build.VERSION_CODES#MNC}. They allow
+ * the selection of a particular channel from the source or sink endpoint by number, i.e. the first
+ * channel, the second channel, and so forth. This avoids problems with artificially assigning
+ * positions to channels of an endpoint, or figuring what the i<sup>th</sup> position bit is within
+ * an endpoint's channel position mask etc.
+ * <br>Here's an example where channel index masks address this confusion: dealing with a 4 channel
+ * USB device. Using a position mask, and based on the channel count, this would be a
+ * {@link #CHANNEL_OUT_QUAD} device, but really one is only interested in channel 0
+ * through channel 3. The USB device would then have the following individual bit channel masks:
+ * {@link #CHANNEL_OUT_FRONT_LEFT},
+ * {@link #CHANNEL_OUT_FRONT_RIGHT}, {@link #CHANNEL_OUT_BACK_LEFT}
+ * and {@link #CHANNEL_OUT_BACK_RIGHT}. But which is channel 0 and which is
+ * channel 3?
+ * <br>For a channel index mask, each channel number is represented as a bit in the mask, from the
+ * lsb (channel 0) upwards to the msb, numerically this bit value is
+ * <code>1 << channelNumber</code>.
+ * A set bit indicates that channel is present in the audio frame, otherwise it is cleared.
+ * The order of the bits also corresponds to that channel number's sample order in the audio frame.
+ * <br>For the previous 4 channel USB device example, the device would have a channel index mask
+ * <code>0xF</code>. Suppose we wanted to select only the first and the third channels; this would
+ * correspond to a channel index mask <code>0x5</code> (the first and third bits set). If an
+ * <code>AudioTrack</code> uses this channel index mask, the audio frame would consist of two
+ * samples, the first sample of each frame routed to channel 0, and the second sample of each frame
+ * routed to channel 2.
+ * The canonical channel index masks by channel count are given by the formula
+ * <code>(1 << channelCount) - 1</code>.
+ *
+ * <h5>Use cases</h5>
+ * <ul>
+ * <li><i>Channel position mask for an endpoint:</i> <code>CHANNEL_OUT_FRONT_LEFT</code>,
+ * <code>CHANNEL_OUT_FRONT_CENTER</code>, etc. for HDMI home theater purposes.
+ * <li><i>Channel position mask for an audio stream:</i> Creating an <code>AudioTrack</code>
+ * to output movie content, where 5.1 multichannel output is to be written.
+ * <li><i>Channel index mask for an endpoint:</i> USB devices for which input and output do not
+ * correspond to left or right speaker or microphone.
+ * <li><i>Channel index mask for an audio stream:</i> An <code>AudioRecord</code> may only want the
+ * third and fourth audio channels of the endpoint (i.e. the second channel pair), and not care
+ * about the position it corresponds to, in which case the channel index mask is <code>0xC</code>.
+ * Multichannel <code>AudioRecord</code> sessions should use channel index masks.
+ * </ul>
*/
public class AudioFormat {
@@ -38,6 +139,7 @@ public class AudioFormat {
public static final int ENCODING_DEFAULT = 1;
// These values must be kept in sync with core/jni/android_media_AudioFormat.h
+ // Also sync av/services/audiopolicy/managerdefault/ConfigParsingUtils.h
/** Audio data format: PCM 16 bit per sample. Guaranteed to be supported by devices. */
public static final int ENCODING_PCM_16BIT = 2;
/** Audio data format: PCM 8 bit per sample. Not guaranteed to be supported by devices. */
@@ -48,18 +150,38 @@ public class AudioFormat {
public static final int ENCODING_AC3 = 5;
/** Audio data format: E-AC-3 compressed */
public static final int ENCODING_E_AC3 = 6;
+ /** Audio data format: DTS compressed */
+ public static final int ENCODING_DTS = 7;
+ /** Audio data format: DTS HD compressed */
+ public static final int ENCODING_DTS_HD = 8;
+ /** Audio data format: MP3 compressed
+ * @hide
+ * */
+ public static final int ENCODING_MP3 = 9;
+ /** Audio data format: AAC LC compressed
+ * @hide
+ * */
+ public static final int ENCODING_AAC_LC = 10;
+ /** Audio data format: AAC HE V1 compressed
+ * @hide
+ * */
+ public static final int ENCODING_AAC_HE_V1 = 11;
+ /** Audio data format: AAC HE V2 compressed
+ * @hide
+ * */
+ public static final int ENCODING_AAC_HE_V2 = 12;
/** Invalid audio channel configuration */
- /** @deprecated use CHANNEL_INVALID instead */
+ /** @deprecated Use {@link #CHANNEL_INVALID} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0;
/** Default audio channel configuration */
- /** @deprecated use CHANNEL_OUT_DEFAULT or CHANNEL_IN_DEFAULT instead */
+ /** @deprecated Use {@link #CHANNEL_OUT_DEFAULT} or {@link #CHANNEL_IN_DEFAULT} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1;
/** Mono audio configuration */
- /** @deprecated use CHANNEL_OUT_MONO or CHANNEL_IN_MONO instead */
+ /** @deprecated Use {@link #CHANNEL_OUT_MONO} or {@link #CHANNEL_IN_MONO} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2;
/** Stereo (2 channel) audio configuration */
- /** @deprecated use CHANNEL_OUT_STEREO or CHANNEL_IN_STEREO instead */
+ /** @deprecated Use {@link #CHANNEL_OUT_STEREO} or {@link #CHANNEL_IN_STEREO} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3;
/** Invalid audio channel mask */
@@ -68,7 +190,7 @@ public class AudioFormat {
public static final int CHANNEL_OUT_DEFAULT = 1;
// Output channel mask definitions below are translated to the native values defined in
- // in /system/core/include/system/audio.h in the JNI code of AudioTrack
+ // in /system/media/audio/include/system/audio.h in the JNI code of AudioTrack
public static final int CHANNEL_OUT_FRONT_LEFT = 0x4;
public static final int CHANNEL_OUT_FRONT_RIGHT = 0x8;
public static final int CHANNEL_OUT_FRONT_CENTER = 0x10;
@@ -112,12 +234,11 @@ public class AudioFormat {
public static final int CHANNEL_OUT_5POINT1_SIDE = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY |
CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT);
- // TODO does this need an @deprecated ?
- // different from AUDIO_CHANNEL_OUT_7POINT1
- public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ // different from AUDIO_CHANNEL_OUT_7POINT1 used internally, and not accepted by AudioRecord.
+ /** @deprecated Not the typical 7.1 surround configuration. Use {@link #CHANNEL_OUT_7POINT1_SURROUND} instead. */
+ @Deprecated public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT |
CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER);
- /** @hide */
// matches AUDIO_CHANNEL_OUT_7POINT1
public static final int CHANNEL_OUT_7POINT1_SURROUND = (
CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_FRONT_RIGHT |
@@ -235,6 +356,29 @@ public class AudioFormat {
case ENCODING_PCM_FLOAT:
case ENCODING_AC3:
case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
+ case ENCODING_MP3:
+ case ENCODING_AAC_LC:
+ case ENCODING_AAC_HE_V1:
+ case ENCODING_AAC_HE_V2:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /** @hide */
+ public static boolean isPublicEncoding(int audioFormat)
+ {
+ switch (audioFormat) {
+ case ENCODING_PCM_8BIT:
+ case ENCODING_PCM_16BIT:
+ case ENCODING_PCM_FLOAT:
+ case ENCODING_AC3:
+ case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
return true;
default:
return false;
@@ -252,6 +396,12 @@ public class AudioFormat {
return true;
case ENCODING_AC3:
case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
+ case ENCODING_MP3:
+ case ENCODING_AAC_LC:
+ case ENCODING_AAC_HE_V1:
+ case ENCODING_AAC_HE_V2:
return false;
case ENCODING_INVALID:
default:
@@ -259,6 +409,28 @@ public class AudioFormat {
}
}
+ /**
+ * Returns an array of public encoding values extracted from an array of
+ * encoding values.
+ * @hide
+ */
+ public static int[] filterPublicFormats(int[] formats) {
+ if (formats == null) {
+ return null;
+ }
+ int[] myCopy = Arrays.copyOf(formats, formats.length);
+ int size = 0;
+ for (int i = 0; i < myCopy.length; i++) {
+ if (isPublicEncoding(myCopy[i])) {
+ if (size != i) {
+ myCopy[size] = myCopy[i];
+ }
+ size++;
+ }
+ }
+ return Arrays.copyOf(myCopy, size);
+ }
+
/** @removed */
public AudioFormat()
{
@@ -277,13 +449,15 @@ public class AudioFormat {
*/
// Update sound trigger JNI in core/jni/android_hardware_SoundTrigger.cpp when modifying this
// constructor
- private AudioFormat(int encoding, int sampleRate, int channelMask) {
+ private AudioFormat(int encoding, int sampleRate, int channelMask, int channelIndexMask) {
mEncoding = encoding;
mSampleRate = sampleRate;
mChannelMask = channelMask;
+ mChannelIndexMask = channelIndexMask;
mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_ENCODING |
AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE |
- AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK;
+ AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK |
+ AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
}
/** @hide */
@@ -294,14 +468,19 @@ public class AudioFormat {
public final static int AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE = 0x1 << 1;
/** @hide */
public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK = 0x1 << 2;
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK = 0x1 << 3;
private int mEncoding;
private int mSampleRate;
private int mChannelMask;
+ private int mChannelIndexMask;
private int mPropertySetMask;
/**
* Return the encoding.
+ * See the section on <a href="#encoding">encodings</a> for more information about the different
+ * types of supported audio encoding.
* @return one of the values that can be set in {@link Builder#setEncoding(int)} or
* {@link AudioFormat#ENCODING_INVALID} if not set.
*/
@@ -326,6 +505,9 @@ public class AudioFormat {
/**
* Return the channel mask.
+ * See the section on <a href="#channelMask">channel masks</a> for more information about
+ * the difference between index-based masks (as returned by {@link #getChannelIndexMask()}) and
+ * the position-based mask returned by this function.
* @return one of the values that can be set in {@link Builder#setChannelMask(int)} or
* {@link AudioFormat#CHANNEL_INVALID} if not set.
*/
@@ -336,6 +518,37 @@ public class AudioFormat {
return mChannelMask;
}
+ /**
+ * Return the channel index mask.
+ * See the section on <a href="#channelMask">channel masks</a> for more information about
+ * the difference between index-based masks, and position-based masks (as returned
+ * by {@link #getChannelMask()}).
+ * @return one of the values that can be set in {@link Builder#setChannelIndexMask(int)} or
+ * {@link AudioFormat#CHANNEL_INVALID} if not set or an invalid mask was used.
+ */
+ public int getChannelIndexMask() {
+ if ((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) == 0) {
+ return CHANNEL_INVALID;
+ }
+ return mChannelIndexMask;
+ }
+
+ /**
+ * Return the channel count.
+ * @return the channel count derived from the channel position mask or the channel index mask.
+ * Zero is returned if both the channel position mask and the channel index mask are not set.
+ */
+ public int getChannelCount() {
+ final int channelIndexCount = Integer.bitCount(getChannelIndexMask());
+ int channelCount = channelCountFromOutChannelMask(getChannelMask());
+ if (channelCount == 0) {
+ channelCount = channelIndexCount;
+ } else if (channelCount != channelIndexCount && channelIndexCount != 0) {
+ channelCount = 0; // position and index channel count mismatch
+ }
+ return channelCount;
+ }
+
/** @hide */
public int getPropertySetMask() {
return mPropertySetMask;
@@ -346,7 +559,8 @@ public class AudioFormat {
* Use this class to configure and create an AudioFormat instance. By setting format
* characteristics such as audio encoding, channel mask or sample rate, you indicate which
* of those are to vary from the default behavior on this device wherever this audio format
- * is used.
+ * is used. See {@link AudioFormat} for a complete description of the different parameters that
+ * can be used to configure an <code>AudioFormat</code> instance.
* <p>{@link AudioFormat} is for instance used in
* {@link AudioTrack#AudioTrack(AudioAttributes, AudioFormat, int, int, int)}. In this
* constructor, every format characteristic set on the <code>Builder</code> (e.g. with
@@ -359,6 +573,7 @@ public class AudioFormat {
private int mEncoding = ENCODING_INVALID;
private int mSampleRate = 0;
private int mChannelMask = CHANNEL_INVALID;
+ private int mChannelIndexMask = 0;
private int mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_NONE;
/**
@@ -375,6 +590,7 @@ public class AudioFormat {
mEncoding = af.mEncoding;
mSampleRate = af.mSampleRate;
mChannelMask = af.mChannelMask;
+ mChannelIndexMask = af.mChannelIndexMask;
mPropertySetMask = af.mPropertySetMask;
}
@@ -388,6 +604,7 @@ public class AudioFormat {
af.mEncoding = mEncoding;
af.mSampleRate = mSampleRate;
af.mChannelMask = mChannelMask;
+ af.mChannelIndexMask = mChannelIndexMask;
af.mPropertySetMask = mPropertySetMask;
return af;
}
@@ -400,6 +617,8 @@ public class AudioFormat {
* {@link AudioFormat#ENCODING_PCM_FLOAT},
* {@link AudioFormat#ENCODING_AC3},
* {@link AudioFormat#ENCODING_E_AC3}.
+ * {@link AudioFormat#ENCODING_DTS},
+ * {@link AudioFormat#ENCODING_DTS_HD}.
* @return the same Builder instance.
* @throws java.lang.IllegalArgumentException
*/
@@ -413,6 +632,8 @@ public class AudioFormat {
case ENCODING_PCM_FLOAT:
case ENCODING_AC3:
case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
mEncoding = encoding;
break;
case ENCODING_INVALID:
@@ -424,29 +645,105 @@ public class AudioFormat {
}
/**
- * Sets the channel mask.
+ * Sets the channel position mask.
+ * The channel position mask specifies the association between audio samples in a frame
+ * with named endpoint channels. The samples in the frame correspond to the
+ * named set bits in the channel position mask, in ascending bit order.
+ * See {@link #setChannelIndexMask(int)} to specify channels
+ * based on endpoint numbered channels. This <a href="#channelPositionMask">description of
+ * channel position masks</a> covers the concept in more details.
* @param channelMask describes the configuration of the audio channels.
- * <p>For output, the mask should be a combination of
+ * <p> For output, the channelMask can be an OR-ed combination of
+ * channel position masks, e.g.
* {@link AudioFormat#CHANNEL_OUT_FRONT_LEFT},
- * {@link AudioFormat#CHANNEL_OUT_FRONT_CENTER},
* {@link AudioFormat#CHANNEL_OUT_FRONT_RIGHT},
- * {@link AudioFormat#CHANNEL_OUT_SIDE_LEFT},
- * {@link AudioFormat#CHANNEL_OUT_SIDE_RIGHT},
+ * {@link AudioFormat#CHANNEL_OUT_FRONT_CENTER},
+ * {@link AudioFormat#CHANNEL_OUT_LOW_FREQUENCY},
* {@link AudioFormat#CHANNEL_OUT_BACK_LEFT},
- * {@link AudioFormat#CHANNEL_OUT_BACK_RIGHT}.
- * <p>for input, the mask should be {@link AudioFormat#CHANNEL_IN_MONO} or
+ * {@link AudioFormat#CHANNEL_OUT_BACK_RIGHT},
+ * {@link AudioFormat#CHANNEL_OUT_BACK_CENTER},
+ * {@link AudioFormat#CHANNEL_OUT_SIDE_LEFT},
+ * {@link AudioFormat#CHANNEL_OUT_SIDE_RIGHT}.
+ * <p> For a valid {@link AudioTrack} channel position mask,
+ * the following conditions apply:
+ * <br> (1) at most eight channel positions may be used;
+ * <br> (2) right/left pairs should be matched.
+ * <p> For input or {@link AudioRecord}, the mask should be
+ * {@link AudioFormat#CHANNEL_IN_MONO} or
* {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is
* guaranteed to work on all devices.
- * @return the same Builder instance.
+ * @return the same <code>Builder</code> instance.
+ * @throws IllegalArgumentException if the channel mask is invalid or
+ * if both channel index mask and channel position mask
+ * are specified but do not have the same channel count.
*/
- public Builder setChannelMask(int channelMask) {
- // only validated when used, with input or output context
+ public @NonNull Builder setChannelMask(int channelMask) {
+ if (channelMask == 0) {
+ throw new IllegalArgumentException("Invalid zero channel mask");
+ } else if (/* channelMask != 0 && */ mChannelIndexMask != 0 &&
+ Integer.bitCount(channelMask) != Integer.bitCount(mChannelIndexMask)) {
+ throw new IllegalArgumentException("Mismatched channel count for mask " +
+ Integer.toHexString(channelMask).toUpperCase());
+ }
mChannelMask = channelMask;
mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK;
return this;
}
/**
+ * Sets the channel index mask.
+ * A channel index mask specifies the association of audio samples in the frame
+ * with numbered endpoint channels. The i-th bit in the channel index
+ * mask corresponds to the i-th endpoint channel.
+ * For example, an endpoint with four channels is represented
+ * as index mask bits 0 through 3. This <a href="#channelIndexMask">description of channel
+ * index masks</a> covers the concept in more details.
+ * See {@link #setChannelMask(int)} for a positional mask interpretation.
+ * <p> Both {@link AudioTrack} and {@link AudioRecord} support
+ * a channel index mask.
+ * If a channel index mask is specified it is used,
+ * otherwise the channel position mask specified
+ * by <code>setChannelMask</code> is used.
+ * For <code>AudioTrack</code> and <code>AudioRecord</code>,
+ * a channel position mask is not required if a channel index mask is specified.
+ *
+ * @param channelIndexMask describes the configuration of the audio channels.
+ * <p> For output, the <code>channelIndexMask</code> is an OR-ed combination of
+ * bits representing the mapping of <code>AudioTrack</code> write samples
+ * to output sink channels.
+ * For example, a mask of <code>0xa</code>, or binary <code>1010</code>,
+ * means the <code>AudioTrack</code> write frame consists of two samples,
+ * which are routed to the second and the fourth channels of the output sink.
+ * Unmatched output sink channels are zero filled and unmatched
+ * <code>AudioTrack</code> write samples are dropped.
+ * <p> For input, the <code>channelIndexMask</code> is an OR-ed combination of
+ * bits representing the mapping of input source channels to
+ * <code>AudioRecord</code> read samples.
+ * For example, a mask of <code>0x5</code>, or binary
+ * <code>101</code>, will read from the first and third channel of the input
+ * source device and store them in the first and second sample of the
+ * <code>AudioRecord</code> read frame.
+ * Unmatched input source channels are dropped and
+ * unmatched <code>AudioRecord</code> read samples are zero filled.
+ * @return the same <code>Builder</code> instance.
+ * @throws IllegalArgumentException if the channel index mask is invalid or
+ * if both channel index mask and channel position mask
+ * are specified but do not have the same channel count.
+ */
+ public @NonNull Builder setChannelIndexMask(int channelIndexMask) {
+ if (channelIndexMask == 0) {
+ throw new IllegalArgumentException("Invalid zero channel index mask");
+ } else if (/* channelIndexMask != 0 && */ mChannelMask != 0 &&
+ Integer.bitCount(channelIndexMask) != Integer.bitCount(mChannelMask)) {
+ throw new IllegalArgumentException("Mismatched channel count for index mask " +
+ Integer.toHexString(channelIndexMask).toUpperCase());
+ }
+ mChannelIndexMask = channelIndexMask;
+ mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
+ return this;
+ }
+
+ /**
* Sets the sample rate.
* @param sampleRate the sample rate expressed in Hz
* @return the same Builder instance.
@@ -467,7 +764,8 @@ public class AudioFormat {
return new String("AudioFormat:"
+ " props=" + mPropertySetMask
+ " enc=" + mEncoding
- + " chan=0x" + Integer.toHexString(mChannelMask)
+ + " chan=0x" + Integer.toHexString(mChannelMask).toUpperCase()
+ + " chan_index=0x" + Integer.toHexString(mChannelIndexMask).toUpperCase()
+ " rate=" + mSampleRate);
}
@@ -478,7 +776,9 @@ public class AudioFormat {
ENCODING_PCM_16BIT,
ENCODING_PCM_FLOAT,
ENCODING_AC3,
- ENCODING_E_AC3
+ ENCODING_E_AC3,
+ ENCODING_DTS,
+ ENCODING_DTS_HD
})
@Retention(RetentionPolicy.SOURCE)
public @interface Encoding {}
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index ed6ce87..316ccf6 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -26,9 +26,7 @@ import android.bluetooth.BluetoothDevice;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
-import android.media.RemoteController.OnClientUpdateListener;
import android.media.audiopolicy.AudioPolicy;
-import android.media.audiopolicy.AudioPolicyConfig;
import android.media.session.MediaController;
import android.media.session.MediaSession;
import android.media.session.MediaSessionLegacyHelper;
@@ -40,13 +38,17 @@ import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.os.RemoteException;
+import android.os.SystemProperties;
import android.os.SystemClock;
import android.os.ServiceManager;
import android.provider.Settings;
+import android.util.ArrayMap;
import android.util.Log;
+import android.util.Pair;
import android.view.KeyEvent;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
@@ -58,16 +60,25 @@ import java.util.Iterator;
*/
public class AudioManager {
- private final Context mApplicationContext;
+ private Context mOriginalContext;
+ private Context mApplicationContext;
private long mVolumeKeyUpTime;
- private final boolean mUseMasterVolume;
private final boolean mUseVolumeKeySounds;
private final boolean mUseFixedVolume;
- private final Binder mToken = new Binder();
private static String TAG = "AudioManager";
private static final AudioPortEventHandler sAudioPortEventHandler = new AudioPortEventHandler();
/**
+ * System properties for whether the default microphone and speaker paths support
+ * near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ private static final String SYSTEM_PROPERTY_MIC_NEAR_ULTRASOUND =
+ "persist.audio.mic.ultrasound";
+ private static final String SYSTEM_PROPERTY_SPEAKER_NEAR_ULTRASOUND =
+ "persist.audio.spkr.ultrasound";
+ private static final String DEFAULT_RESULT_FALSE_STRING = "false";
+
+ /**
* Broadcast intent, a hint for applications that audio is about to become
* 'noisy' due to a change in audio outputs. For example, this intent may
* be sent when a wired headset is unplugged, or when an A2DP audio
@@ -138,26 +149,31 @@ public class AudioManager {
public static final String VOLUME_CHANGED_ACTION = "android.media.VOLUME_CHANGED_ACTION";
/**
- * @hide Broadcast intent when a stream mute state changes.
- * Includes the stream that changed and the new mute state
+ * @hide Broadcast intent when the devices for a particular stream type changes.
+ * Includes the stream, the new devices and previous devices.
+ * Notes:
+ * - for internal platform use only, do not make public,
+ * - never used for "remote" volume changes
*
* @see #EXTRA_VOLUME_STREAM_TYPE
- * @see #EXTRA_STREAM_VOLUME_MUTED
+ * @see #EXTRA_VOLUME_STREAM_DEVICES
+ * @see #EXTRA_PREV_VOLUME_STREAM_DEVICES
+ * @see #getDevicesForStream
*/
@SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String STREAM_MUTE_CHANGED_ACTION =
- "android.media.STREAM_MUTE_CHANGED_ACTION";
+ public static final String STREAM_DEVICES_CHANGED_ACTION =
+ "android.media.STREAM_DEVICES_CHANGED_ACTION";
/**
- * @hide Broadcast intent when the master volume changes.
- * Includes the new volume
+ * @hide Broadcast intent when a stream mute state changes.
+ * Includes the stream that changed and the new mute state
*
- * @see #EXTRA_MASTER_VOLUME_VALUE
- * @see #EXTRA_PREV_MASTER_VOLUME_VALUE
+ * @see #EXTRA_VOLUME_STREAM_TYPE
+ * @see #EXTRA_STREAM_VOLUME_MUTED
*/
@SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String MASTER_VOLUME_CHANGED_ACTION =
- "android.media.MASTER_VOLUME_CHANGED_ACTION";
+ public static final String STREAM_MUTE_CHANGED_ACTION =
+ "android.media.STREAM_MUTE_CHANGED_ACTION";
/**
* @hide Broadcast intent when the master mute state changes.
@@ -211,18 +227,16 @@ public class AudioManager {
"android.media.EXTRA_PREV_VOLUME_STREAM_VALUE";
/**
- * @hide The new master volume value for the master volume changed intent.
- * Value is integer between 0 and 100 inclusive.
+ * @hide The devices associated with the stream for the stream devices changed intent.
*/
- public static final String EXTRA_MASTER_VOLUME_VALUE =
- "android.media.EXTRA_MASTER_VOLUME_VALUE";
+ public static final String EXTRA_VOLUME_STREAM_DEVICES =
+ "android.media.EXTRA_VOLUME_STREAM_DEVICES";
/**
- * @hide The previous master volume value for the master volume changed intent.
- * Value is integer between 0 and 100 inclusive.
+ * @hide The previous devices associated with the stream for the stream devices changed intent.
*/
- public static final String EXTRA_PREV_MASTER_VOLUME_VALUE =
- "android.media.EXTRA_PREV_MASTER_VOLUME_VALUE";
+ public static final String EXTRA_PREV_VOLUME_STREAM_DEVICES =
+ "android.media.EXTRA_PREV_VOLUME_STREAM_DEVICES";
/**
* @hide The new master volume mute state for the master mute changed intent.
@@ -295,68 +309,6 @@ public class AudioManager {
*/
public static final String EXTRA_ENCODINGS = "android.media.extra.ENCODINGS";
- /**
- * Broadcast Action: An analog audio speaker/headset plugged in or unplugged.
- *
- * <p>The intent will have the following extra values:
- * <ul>
- * <li><em>state</em> - 0 for unplugged, 1 for plugged. </li>
- * <li><em>name</em> - Headset type, human readable string </li>
- * </ul>
- * </ul>
- * @hide
- */
- @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String ACTION_ANALOG_AUDIO_DOCK_PLUG =
- "android.media.action.ANALOG_AUDIO_DOCK_PLUG";
-
- /**
- * Broadcast Action: A digital audio speaker/headset plugged in or unplugged.
- *
- * <p>The intent will have the following extra values:
- * <ul>
- * <li><em>state</em> - 0 for unplugged, 1 for plugged. </li>
- * <li><em>name</em> - Headset type, human readable string </li>
- * </ul>
- * </ul>
- * @hide
- */
- @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String ACTION_DIGITAL_AUDIO_DOCK_PLUG =
- "android.media.action.DIGITAL_AUDIO_DOCK_PLUG";
-
- /**
- * Broadcast Action: A USB audio accessory was plugged in or unplugged.
- *
- * <p>The intent will have the following extra values:
- * <ul>
- * <li><em>state</em> - 0 for unplugged, 1 for plugged. </li>
- * <li><em>card</em> - ALSA card number (integer) </li>
- * <li><em>device</em> - ALSA device number (integer) </li>
- * </ul>
- * </ul>
- * @hide
- */
- @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String ACTION_USB_AUDIO_ACCESSORY_PLUG =
- "android.media.action.USB_AUDIO_ACCESSORY_PLUG";
-
- /**
- * Broadcast Action: A USB audio device was plugged in or unplugged.
- *
- * <p>The intent will have the following extra values:
- * <ul>
- * <li><em>state</em> - 0 for unplugged, 1 for plugged. </li>
- * <li><em>card</em> - ALSA card number (integer) </li>
- * <li><em>device</em> - ALSA device number (integer) </li>
- * </ul>
- * </ul>
- * @hide
- */
- @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
- public static final String ACTION_USB_AUDIO_DEVICE_PLUG =
- "android.media.action.USB_AUDIO_DEVICE_PLUG";
-
/** The audio stream for phone calls */
public static final int STREAM_VOICE_CALL = AudioSystem.STREAM_VOICE_CALL;
/** The audio stream for system sounds */
@@ -408,6 +360,31 @@ public class AudioManager {
*/
public static final int ADJUST_SAME = 0;
+ /**
+ * Mute the volume. Has no effect if the stream is already muted.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_MUTE = -100;
+
+ /**
+ * Unmute the volume. Has no effect if the stream is not muted.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_UNMUTE = 100;
+
+ /**
+ * Toggle the mute state. If muted the stream will be unmuted. If not muted
+ * the stream will be muted.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_TOGGLE_MUTE = 101;
+
// Flags should be powers of 2!
/**
@@ -504,6 +481,12 @@ public class AudioManager {
*/
public static final int FLAG_SHOW_VIBRATE_HINT = 1 << 11;
+ /**
+ * Adjusting the volume due to a hardware key press.
+ * @hide
+ */
+ public static final int FLAG_FROM_KEY = 1 << 12;
+
private static final String[] FLAG_NAMES = {
"FLAG_SHOW_UI",
"FLAG_ALLOW_RINGER_MODES",
@@ -517,6 +500,7 @@ public class AudioManager {
"FLAG_ACTIVE_MEDIA_ONLY",
"FLAG_SHOW_UI_WARNINGS",
"FLAG_SHOW_VIBRATE_HINT",
+ "FLAG_FROM_KEY",
};
/** @hide */
@@ -641,14 +625,34 @@ public class AudioManager {
* @hide
*/
public AudioManager(Context context) {
- mApplicationContext = context.getApplicationContext();
- mUseMasterVolume = mApplicationContext.getResources().getBoolean(
- com.android.internal.R.bool.config_useMasterVolume);
- mUseVolumeKeySounds = mApplicationContext.getResources().getBoolean(
+ setContext(context);
+ mUseVolumeKeySounds = getContext().getResources().getBoolean(
com.android.internal.R.bool.config_useVolumeKeySounds);
- mUseFixedVolume = mApplicationContext.getResources().getBoolean(
+ mUseFixedVolume = getContext().getResources().getBoolean(
com.android.internal.R.bool.config_useFixedVolume);
sAudioPortEventHandler.init();
+
+ mPortListener = new OnAmPortUpdateListener();
+ registerAudioPortUpdateListener(mPortListener);
+ }
+
+ private Context getContext() {
+ if (mApplicationContext == null) {
+ setContext(mOriginalContext);
+ }
+ if (mApplicationContext != null) {
+ return mApplicationContext;
+ }
+ return mOriginalContext;
+ }
+
+ private void setContext(Context context) {
+ mApplicationContext = context.getApplicationContext();
+ if (mApplicationContext != null) {
+ mOriginalContext = null;
+ } else {
+ mOriginalContext = context;
+ }
}
private static IAudioService getService()
@@ -685,7 +689,7 @@ public class AudioManager {
* or {@link KeyEvent#KEYCODE_MEDIA_AUDIO_TRACK}.
*/
public void dispatchMediaKeyEvent(KeyEvent keyEvent) {
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
helper.sendMediaButtonEvent(keyEvent, false);
}
@@ -700,18 +704,13 @@ public class AudioManager {
int keyCode = event.getKeyCode();
if (keyCode != KeyEvent.KEYCODE_VOLUME_DOWN && keyCode != KeyEvent.KEYCODE_VOLUME_UP
&& keyCode != KeyEvent.KEYCODE_VOLUME_MUTE
- && mVolumeKeyUpTime + AudioService.PLAY_SOUND_DELAY
- > SystemClock.uptimeMillis()) {
+ && mVolumeKeyUpTime + AudioSystem.PLAY_SOUND_DELAY > SystemClock.uptimeMillis()) {
/*
* The user has hit another key during the delay (e.g., 300ms)
* since the last volume key up, so cancel any sounds.
*/
- if (mUseMasterVolume) {
- adjustMasterVolume(ADJUST_SAME, AudioManager.FLAG_REMOVE_SOUND_AND_VIBRATE);
- } else {
- adjustSuggestedStreamVolume(ADJUST_SAME,
- stream, AudioManager.FLAG_REMOVE_SOUND_AND_VIBRATE);
- }
+ adjustSuggestedStreamVolume(ADJUST_SAME,
+ stream, AudioManager.FLAG_REMOVE_SOUND_AND_VIBRATE);
}
}
@@ -727,26 +726,16 @@ public class AudioManager {
* Adjust the volume in on key down since it is more
* responsive to the user.
*/
- int flags = FLAG_SHOW_UI | FLAG_VIBRATE;
-
- if (mUseMasterVolume) {
- adjustMasterVolume(
- keyCode == KeyEvent.KEYCODE_VOLUME_UP
- ? ADJUST_RAISE
- : ADJUST_LOWER,
- flags);
- } else {
- adjustSuggestedStreamVolume(
- keyCode == KeyEvent.KEYCODE_VOLUME_UP
- ? ADJUST_RAISE
- : ADJUST_LOWER,
- stream,
- flags);
- }
+ adjustSuggestedStreamVolume(
+ keyCode == KeyEvent.KEYCODE_VOLUME_UP
+ ? ADJUST_RAISE
+ : ADJUST_LOWER,
+ stream,
+ FLAG_SHOW_UI | FLAG_VIBRATE);
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
if (event.getRepeatCount() == 0) {
- MediaSessionLegacyHelper.getHelper(mApplicationContext)
+ MediaSessionLegacyHelper.getHelper(getContext())
.sendVolumeKeyEvent(event, false);
}
break;
@@ -766,20 +755,15 @@ public class AudioManager {
* sound to play when a user holds down volume down to mute.
*/
if (mUseVolumeKeySounds) {
- if (mUseMasterVolume) {
- adjustMasterVolume(ADJUST_SAME, FLAG_PLAY_SOUND);
- } else {
- int flags = FLAG_PLAY_SOUND;
- adjustSuggestedStreamVolume(
- ADJUST_SAME,
- stream,
- flags);
- }
+ adjustSuggestedStreamVolume(
+ ADJUST_SAME,
+ stream,
+ FLAG_PLAY_SOUND);
}
mVolumeKeyUpTime = SystemClock.uptimeMillis();
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
- MediaSessionLegacyHelper.getHelper(mApplicationContext)
+ MediaSessionLegacyHelper.getHelper(getContext())
.sendVolumeKeyEvent(event, false);
break;
}
@@ -824,13 +808,8 @@ public class AudioManager {
public void adjustStreamVolume(int streamType, int direction, int flags) {
IAudioService service = getService();
try {
- if (mUseMasterVolume) {
- service.adjustMasterVolume(direction, flags,
- mApplicationContext.getOpPackageName());
- } else {
- service.adjustStreamVolume(streamType, direction, flags,
- mApplicationContext.getOpPackageName());
- }
+ service.adjustStreamVolume(streamType, direction, flags,
+ getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in adjustStreamVolume", e);
}
@@ -842,13 +821,17 @@ public class AudioManager {
* screen is showing. Another example, if music is playing in the background
* and a call is not active, the music stream will be adjusted.
* <p>
- * This method should only be used by applications that replace the platform-wide
- * management of audio settings or the main telephony application.
- * <p>This method has no effect if the device implements a fixed volume policy
+ * This method should only be used by applications that replace the
+ * platform-wide management of audio settings or the main telephony
+ * application.
+ * <p>
+ * This method has no effect if the device implements a fixed volume policy
* as indicated by {@link #isVolumeFixed()}.
+ *
* @param direction The direction to adjust the volume. One of
- * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or
- * {@link #ADJUST_SAME}.
+ * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE},
+ * {@link #ADJUST_SAME}, {@link #ADJUST_MUTE},
+ * {@link #ADJUST_UNMUTE}, or {@link #ADJUST_TOGGLE_MUTE}.
* @param flags One or more flags.
* @see #adjustSuggestedStreamVolume(int, int, int)
* @see #adjustStreamVolume(int, int, int)
@@ -856,35 +839,28 @@ public class AudioManager {
* @see #isVolumeFixed()
*/
public void adjustVolume(int direction, int flags) {
- IAudioService service = getService();
- try {
- if (mUseMasterVolume) {
- service.adjustMasterVolume(direction, flags,
- mApplicationContext.getOpPackageName());
- } else {
- MediaSessionLegacyHelper helper =
- MediaSessionLegacyHelper.getHelper(mApplicationContext);
- helper.sendAdjustVolumeBy(USE_DEFAULT_STREAM_TYPE, direction, flags);
- }
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in adjustVolume", e);
- }
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.sendAdjustVolumeBy(USE_DEFAULT_STREAM_TYPE, direction, flags);
}
/**
* Adjusts the volume of the most relevant stream, or the given fallback
* stream.
* <p>
- * This method should only be used by applications that replace the platform-wide
- * management of audio settings or the main telephony application.
- *
- * <p>This method has no effect if the device implements a fixed volume policy
+ * This method should only be used by applications that replace the
+ * platform-wide management of audio settings or the main telephony
+ * application.
+ * <p>
+ * This method has no effect if the device implements a fixed volume policy
* as indicated by {@link #isVolumeFixed()}.
+ *
* @param direction The direction to adjust the volume. One of
- * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or
- * {@link #ADJUST_SAME}.
+ * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE},
+ * {@link #ADJUST_SAME}, {@link #ADJUST_MUTE},
+ * {@link #ADJUST_UNMUTE}, or {@link #ADJUST_TOGGLE_MUTE}.
* @param suggestedStreamType The stream type that will be used if there
- * isn't a relevant stream. {@link #USE_DEFAULT_STREAM_TYPE} is valid here.
+ * isn't a relevant stream. {@link #USE_DEFAULT_STREAM_TYPE} is
+ * valid here.
* @param flags One or more flags.
* @see #adjustVolume(int, int)
* @see #adjustStreamVolume(int, int, int)
@@ -892,36 +868,17 @@ public class AudioManager {
* @see #isVolumeFixed()
*/
public void adjustSuggestedStreamVolume(int direction, int suggestedStreamType, int flags) {
- IAudioService service = getService();
- try {
- if (mUseMasterVolume) {
- service.adjustMasterVolume(direction, flags,
- mApplicationContext.getOpPackageName());
- } else {
- MediaSessionLegacyHelper helper =
- MediaSessionLegacyHelper.getHelper(mApplicationContext);
- helper.sendAdjustVolumeBy(suggestedStreamType, direction, flags);
- }
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in adjustSuggestedStreamVolume", e);
- }
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.sendAdjustVolumeBy(suggestedStreamType, direction, flags);
}
- /**
- * Adjusts the master volume for the device's audio amplifier.
- * <p>
- *
- * @param steps The number of volume steps to adjust. A positive
- * value will raise the volume.
- * @param flags One or more flags.
- * @hide
- */
- public void adjustMasterVolume(int steps, int flags) {
+ /** @hide */
+ public void setMasterMute(boolean mute, int flags) {
IAudioService service = getService();
try {
- service.adjustMasterVolume(steps, flags, mApplicationContext.getOpPackageName());
+ service.setMasterMute(mute, flags, getContext().getOpPackageName());
} catch (RemoteException e) {
- Log.e(TAG, "Dead object in adjustMasterVolume", e);
+ Log.e(TAG, "Dead object in setMasterMute", e);
}
}
@@ -973,11 +930,7 @@ public class AudioManager {
public int getStreamMaxVolume(int streamType) {
IAudioService service = getService();
try {
- if (mUseMasterVolume) {
- return service.getMasterMaxVolume();
- } else {
- return service.getStreamMaxVolume(streamType);
- }
+ return service.getStreamMaxVolume(streamType);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in getStreamMaxVolume", e);
return 0;
@@ -985,6 +938,24 @@ public class AudioManager {
}
/**
+ * Returns the minimum volume index for a particular stream.
+ *
+ * @param streamType The stream type whose minimum volume index is returned.
+ * @return The minimum valid volume index for the stream.
+ * @see #getStreamVolume(int)
+ * @hide
+ */
+ public int getStreamMinVolume(int streamType) {
+ IAudioService service = getService();
+ try {
+ return service.getStreamMinVolume(streamType);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in getStreamMinVolume", e);
+ return 0;
+ }
+ }
+
+ /**
* Returns the current volume index for a particular stream.
*
* @param streamType The stream type whose volume index is returned.
@@ -995,11 +966,7 @@ public class AudioManager {
public int getStreamVolume(int streamType) {
IAudioService service = getService();
try {
- if (mUseMasterVolume) {
- return service.getMasterVolume();
- } else {
- return service.getStreamVolume(streamType);
- }
+ return service.getStreamVolume(streamType);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in getStreamVolume", e);
return 0;
@@ -1014,11 +981,7 @@ public class AudioManager {
public int getLastAudibleStreamVolume(int streamType) {
IAudioService service = getService();
try {
- if (mUseMasterVolume) {
- return service.getLastAudibleMasterVolume();
- } else {
- return service.getLastAudibleStreamVolume(streamType);
- }
+ return service.getLastAudibleStreamVolume(streamType);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in getLastAudibleStreamVolume", e);
return 0;
@@ -1031,12 +994,12 @@ public class AudioManager {
* It is assumed that this stream type is also tied to ringer mode changes.
* @hide
*/
- public int getMasterStreamType() {
+ public int getUiSoundsStreamType() {
IAudioService service = getService();
try {
- return service.getMasterStreamType();
+ return service.getUiSoundsStreamType();
} catch (RemoteException e) {
- Log.e(TAG, "Dead object in getMasterStreamType", e);
+ Log.e(TAG, "Dead object in getUiSoundsStreamType", e);
return STREAM_RING;
}
}
@@ -1060,7 +1023,7 @@ public class AudioManager {
}
IAudioService service = getService();
try {
- service.setRingerModeExternal(ringerMode, mApplicationContext.getOpPackageName());
+ service.setRingerModeExternal(ringerMode, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setRingerMode", e);
}
@@ -1081,149 +1044,79 @@ public class AudioManager {
public void setStreamVolume(int streamType, int index, int flags) {
IAudioService service = getService();
try {
- if (mUseMasterVolume) {
- service.setMasterVolume(index, flags, mApplicationContext.getOpPackageName());
- } else {
- service.setStreamVolume(streamType, index, flags,
- mApplicationContext.getOpPackageName());
- }
+ service.setStreamVolume(streamType, index, flags, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setStreamVolume", e);
}
}
/**
- * Returns the maximum volume index for master volume.
- *
- * @hide
- */
- public int getMasterMaxVolume() {
- IAudioService service = getService();
- try {
- return service.getMasterMaxVolume();
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in getMasterMaxVolume", e);
- return 0;
- }
- }
-
- /**
- * Returns the current volume index for master volume.
- *
- * @return The current volume index for master volume.
- * @hide
- */
- public int getMasterVolume() {
- IAudioService service = getService();
- try {
- return service.getMasterVolume();
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in getMasterVolume", e);
- return 0;
- }
- }
-
- /**
- * Get last audible volume before master volume was muted.
- *
- * @hide
- */
- public int getLastAudibleMasterVolume() {
- IAudioService service = getService();
- try {
- return service.getLastAudibleMasterVolume();
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in getLastAudibleMasterVolume", e);
- return 0;
- }
- }
-
- /**
- * Sets the volume index for master volume.
- *
- * @param index The volume index to set. See
- * {@link #getMasterMaxVolume()} for the largest valid value.
- * @param flags One or more flags.
- * @see #getMasterMaxVolume()
- * @see #getMasterVolume()
- * @hide
- */
- public void setMasterVolume(int index, int flags) {
- IAudioService service = getService();
- try {
- service.setMasterVolume(index, flags, mApplicationContext.getOpPackageName());
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setMasterVolume", e);
- }
- }
-
- /**
- * Solo or unsolo a particular stream. All other streams are muted.
+ * Solo or unsolo a particular stream.
* <p>
- * The solo command is protected against client process death: if a process
- * with an active solo request on a stream dies, all streams that were muted
- * because of this request will be unmuted automatically.
- * <p>
- * The solo requests for a given stream are cumulative: the AudioManager
- * can receive several solo requests from one or more clients and the stream
- * will be unsoloed only when the same number of unsolo requests are received.
- * <p>
- * For a better user experience, applications MUST unsolo a soloed stream
- * in onPause() and solo is again in onResume() if appropriate.
- * <p>This method has no effect if the device implements a fixed volume policy
- * as indicated by {@link #isVolumeFixed()}.
+ * Do not use. This method has been deprecated and is now a no-op.
+ * {@link #requestAudioFocus} should be used for exclusive audio playback.
*
* @param streamType The stream to be soloed/unsoloed.
- * @param state The required solo state: true for solo ON, false for solo OFF
- *
+ * @param state The required solo state: true for solo ON, false for solo
+ * OFF
* @see #isVolumeFixed()
+ * @deprecated Do not use. If you need exclusive audio playback use
+ * {@link #requestAudioFocus}.
*/
+ @Deprecated
public void setStreamSolo(int streamType, boolean state) {
- IAudioService service = getService();
- try {
- service.setStreamSolo(streamType, state, mICallBack);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setStreamSolo", e);
- }
+ Log.w(TAG, "setStreamSolo has been deprecated. Do not use.");
}
/**
* Mute or unmute an audio stream.
* <p>
- * The mute command is protected against client process death: if a process
- * with an active mute request on a stream dies, this stream will be unmuted
- * automatically.
- * <p>
- * The mute requests for a given stream are cumulative: the AudioManager
- * can receive several mute requests from one or more clients and the stream
- * will be unmuted only when the same number of unmute requests are received.
- * <p>
- * For a better user experience, applications MUST unmute a muted stream
- * in onPause() and mute is again in onResume() if appropriate.
+ * This method should only be used by applications that replace the
+ * platform-wide management of audio settings or the main telephony
+ * application.
* <p>
- * This method should only be used by applications that replace the platform-wide
- * management of audio settings or the main telephony application.
- * <p>This method has no effect if the device implements a fixed volume policy
+ * This method has no effect if the device implements a fixed volume policy
* as indicated by {@link #isVolumeFixed()}.
+ * <p>
+ * This method was deprecated in API level 22. Prior to API level 22 this
+ * method had significantly different behavior and should be used carefully.
+ * The following applies only to pre-22 platforms:
+ * <ul>
+ * <li>The mute command is protected against client process death: if a
+ * process with an active mute request on a stream dies, this stream will be
+ * unmuted automatically.</li>
+ * <li>The mute requests for a given stream are cumulative: the AudioManager
+ * can receive several mute requests from one or more clients and the stream
+ * will be unmuted only when the same number of unmute requests are
+ * received.</li>
+ * <li>For a better user experience, applications MUST unmute a muted stream
+ * in onPause() and mute it again in onResume() if appropriate.</li>
+ * </ul>
*
* @param streamType The stream to be muted/unmuted.
- * @param state The required mute state: true for mute ON, false for mute OFF
- *
+ * @param state The required mute state: true for mute ON, false for mute
+ * OFF
* @see #isVolumeFixed()
+ * @deprecated Use {@link #adjustStreamVolume(int, int, int)} with
+ * {@link #ADJUST_MUTE} or {@link #ADJUST_UNMUTE} instead.
*/
+ @Deprecated
public void setStreamMute(int streamType, boolean state) {
- IAudioService service = getService();
- try {
- service.setStreamMute(streamType, state, mICallBack);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setStreamMute", e);
+ Log.w(TAG, "setStreamMute is deprecated. adjustStreamVolume should be used instead.");
+ int direction = state ? ADJUST_MUTE : ADJUST_UNMUTE;
+ if (streamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
+ adjustSuggestedStreamVolume(direction, streamType, 0);
+ } else {
+ adjustStreamVolume(streamType, direction, 0);
}
}
/**
- * get stream mute state.
+ * Returns the current mute state for a particular stream.
*
- * @hide
+ * @param streamType The stream to get mute state for.
+ * @return The mute state for the given stream.
+ * @see #adjustStreamVolume(int, int, int)
*/
public boolean isStreamMute(int streamType) {
IAudioService service = getService();
@@ -1236,29 +1129,6 @@ public class AudioManager {
}
/**
- * set master mute state.
- *
- * @hide
- */
- public void setMasterMute(boolean state) {
- setMasterMute(state, FLAG_SHOW_UI);
- }
-
- /**
- * set master mute state with optional flags.
- *
- * @hide
- */
- public void setMasterMute(boolean state, int flags) {
- IAudioService service = getService();
- try {
- service.setMasterMute(state, flags, mApplicationContext.getOpPackageName(), mICallBack);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setMasterMute", e);
- }
- }
-
- /**
* get master mute state.
*
* @hide
@@ -1281,9 +1151,6 @@ public class AudioManager {
* @hide
*/
public void forceVolumeControlStream(int streamType) {
- if (mUseMasterVolume) {
- return;
- }
IAudioService service = getService();
try {
service.forceVolumeControlStream(streamType, mICallBack);
@@ -1490,7 +1357,7 @@ public class AudioManager {
* @see #startBluetoothSco()
*/
public boolean isBluetoothScoAvailableOffCall() {
- return mApplicationContext.getResources().getBoolean(
+ return getContext().getResources().getBoolean(
com.android.internal.R.bool.config_bluetooth_sco_off_call);
}
@@ -1543,7 +1410,7 @@ public class AudioManager {
IAudioService service = getService();
try {
service.startBluetoothSco(mICallBack,
- mApplicationContext.getApplicationInfo().targetSdkVersion);
+ getContext().getApplicationInfo().targetSdkVersion);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in startBluetoothSco", e);
}
@@ -1691,7 +1558,7 @@ public class AudioManager {
public void setMicrophoneMute(boolean on){
IAudioService service = getService();
try {
- service.setMicrophoneMute(on, mApplicationContext.getOpPackageName());
+ service.setMicrophoneMute(on, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setMicrophoneMute", e);
}
@@ -1724,7 +1591,7 @@ public class AudioManager {
public void setMode(int mode) {
IAudioService service = getService();
try {
- service.setMode(mode, mICallBack);
+ service.setMode(mode, mICallBack, mApplicationContext.getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setMode", e);
}
@@ -2122,7 +1989,7 @@ public class AudioManager {
* Settings has an in memory cache, so this is fast.
*/
private boolean querySoundEffectsEnabled(int user) {
- return Settings.System.getIntForUser(mApplicationContext.getContentResolver(),
+ return Settings.System.getIntForUser(getContext().getContentResolver(),
Settings.System.SOUND_EFFECTS_ENABLED, 0, user) != 0;
}
@@ -2534,7 +2401,7 @@ public class AudioManager {
try {
status = service.requestAudioFocus(requestAttributes, durationHint, mICallBack,
mAudioFocusDispatcher, getIdForAudioFocusListener(l),
- mApplicationContext.getOpPackageName() /* package name */, flags,
+ getContext().getOpPackageName() /* package name */, flags,
ap != null ? ap.cb() : null);
} catch (RemoteException e) {
Log.e(TAG, "Can't call requestAudioFocus() on AudioService:", e);
@@ -2558,8 +2425,8 @@ public class AudioManager {
service.requestAudioFocus(new AudioAttributes.Builder()
.setInternalLegacyStreamType(streamType).build(),
durationHint, mICallBack, null,
- MediaFocusControl.IN_VOICE_COMM_FOCUS_ID,
- mApplicationContext.getOpPackageName(),
+ AudioSystem.IN_VOICE_COMM_FOCUS_ID,
+ getContext().getOpPackageName(),
AUDIOFOCUS_FLAG_LOCK,
null /* policy token */);
} catch (RemoteException e) {
@@ -2576,7 +2443,7 @@ public class AudioManager {
public void abandonAudioFocusForCall() {
IAudioService service = getService();
try {
- service.abandonAudioFocus(null, MediaFocusControl.IN_VOICE_COMM_FOCUS_ID,
+ service.abandonAudioFocus(null, AudioSystem.IN_VOICE_COMM_FOCUS_ID,
null /*AudioAttributes, legacy behavior*/);
} catch (RemoteException e) {
Log.e(TAG, "Can't call abandonAudioFocusForCall() on AudioService:", e);
@@ -2628,7 +2495,7 @@ public class AudioManager {
if (eventReceiver == null) {
return;
}
- if (!eventReceiver.getPackageName().equals(mApplicationContext.getPackageName())) {
+ if (!eventReceiver.getPackageName().equals(getContext().getPackageName())) {
Log.e(TAG, "registerMediaButtonEventReceiver() error: " +
"receiver and context package names don't match");
return;
@@ -2637,7 +2504,7 @@ public class AudioManager {
Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
// the associated intent will be handled by the component being registered
mediaButtonIntent.setComponent(eventReceiver);
- PendingIntent pi = PendingIntent.getBroadcast(mApplicationContext,
+ PendingIntent pi = PendingIntent.getBroadcast(getContext(),
0/*requestCode, ignored*/, mediaButtonIntent, 0/*flags*/);
registerMediaButtonIntent(pi, eventReceiver);
}
@@ -2671,8 +2538,8 @@ public class AudioManager {
Log.e(TAG, "Cannot call registerMediaButtonIntent() with a null parameter");
return;
}
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
- helper.addMediaButtonListener(pi, eventReceiver, mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.addMediaButtonListener(pi, eventReceiver, getContext());
}
/**
@@ -2690,7 +2557,7 @@ public class AudioManager {
Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
// the associated intent will be handled by the component being registered
mediaButtonIntent.setComponent(eventReceiver);
- PendingIntent pi = PendingIntent.getBroadcast(mApplicationContext,
+ PendingIntent pi = PendingIntent.getBroadcast(getContext(),
0/*requestCode, ignored*/, mediaButtonIntent, 0/*flags*/);
unregisterMediaButtonIntent(pi);
}
@@ -2713,7 +2580,7 @@ public class AudioManager {
* @hide
*/
public void unregisterMediaButtonIntent(PendingIntent pi) {
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
helper.removeMediaButtonListener(pi);
}
@@ -2730,7 +2597,7 @@ public class AudioManager {
if ((rcClient == null) || (rcClient.getRcMediaIntent() == null)) {
return;
}
- rcClient.registerWithSession(MediaSessionLegacyHelper.getHelper(mApplicationContext));
+ rcClient.registerWithSession(MediaSessionLegacyHelper.getHelper(getContext()));
}
/**
@@ -2745,7 +2612,7 @@ public class AudioManager {
if ((rcClient == null) || (rcClient.getRcMediaIntent() == null)) {
return;
}
- rcClient.unregisterWithSession(MediaSessionLegacyHelper.getHelper(mApplicationContext));
+ rcClient.unregisterWithSession(MediaSessionLegacyHelper.getHelper(getContext()));
}
/**
@@ -2753,7 +2620,7 @@ public class AudioManager {
* metadata updates and playback state information from applications using
* {@link RemoteControlClient}, and control their playback.
* <p>
- * Registration requires the {@link OnClientUpdateListener} listener to be
+ * Registration requires the {@link RemoteController.OnClientUpdateListener} listener to be
* one of the enabled notification listeners (see
* {@link android.service.notification.NotificationListenerService}).
*
@@ -3260,10 +3127,11 @@ public class AudioManager {
* @param name device name
* {@hide}
*/
- public void setWiredDeviceConnectionState(int device, int state, String name) {
+ public void setWiredDeviceConnectionState(int type, int state, String address, String name) {
IAudioService service = getService();
try {
- service.setWiredDeviceConnectionState(device, state, name);
+ service.setWiredDeviceConnectionState(type, state, address, name,
+ mApplicationContext.getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setWiredDeviceConnectionState "+e);
}
@@ -3288,9 +3156,8 @@ public class AudioManager {
delay = service.setBluetoothA2dpDeviceConnectionState(device, state, profile);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setBluetoothA2dpDeviceConnectionState "+e);
- } finally {
- return delay;
}
+ return delay;
}
/** {@hide} */
@@ -3317,6 +3184,20 @@ public class AudioManager {
"android.media.property.OUTPUT_FRAMES_PER_BUFFER";
/**
+ * Used as a key for {@link #getProperty} to determine if the default microphone audio source
+ * supports near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ public static final String PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND =
+ "android.media.property.SUPPORT_MIC_NEAR_ULTRASOUND";
+
+ /**
+ * Used as a key for {@link #getProperty} to determine if the default speaker audio path
+ * supports near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ public static final String PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND =
+ "android.media.property.SUPPORT_SPEAKER_NEAR_ULTRASOUND";
+
+ /**
* Returns the value of the property with the specified key.
* @param key One of the strings corresponding to a property key: either
* {@link #PROPERTY_OUTPUT_SAMPLE_RATE} or
@@ -3331,6 +3212,12 @@ public class AudioManager {
} else if (PROPERTY_OUTPUT_FRAMES_PER_BUFFER.equals(key)) {
int outputFramesPerBuffer = AudioSystem.getPrimaryOutputFrameCount();
return outputFramesPerBuffer > 0 ? Integer.toString(outputFramesPerBuffer) : null;
+ } else if (PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND.equals(key)) {
+ return SystemProperties.get(SYSTEM_PROPERTY_MIC_NEAR_ULTRASOUND,
+ DEFAULT_RESULT_FALSE_STRING);
+ } else if (PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND.equals(key)) {
+ return SystemProperties.get(SYSTEM_PROPERTY_SPEAKER_NEAR_ULTRASOUND,
+ DEFAULT_RESULT_FALSE_STRING);
} else {
// null or unknown key
return null;
@@ -3392,9 +3279,22 @@ public class AudioManager {
* Only useful for volume controllers.
* @hide
*/
+ public boolean isStreamAffectedByMute(int streamType) {
+ try {
+ return getService().isStreamAffectedByMute(streamType);
+ } catch (RemoteException e) {
+ Log.w(TAG, "Error calling isStreamAffectedByMute", e);
+ return false;
+ }
+ }
+
+ /**
+ * Only useful for volume controllers.
+ * @hide
+ */
public void disableSafeMediaVolume() {
try {
- getService().disableSafeMediaVolume();
+ getService().disableSafeMediaVolume(mApplicationContext.getOpPackageName());
} catch (RemoteException e) {
Log.w(TAG, "Error disabling safe media volume", e);
}
@@ -3406,7 +3306,7 @@ public class AudioManager {
*/
public void setRingerModeInternal(int ringerMode) {
try {
- getService().setRingerModeInternal(ringerMode, mApplicationContext.getOpPackageName());
+ getService().setRingerModeInternal(ringerMode, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.w(TAG, "Error calling setRingerModeInternal", e);
}
@@ -3426,6 +3326,18 @@ public class AudioManager {
}
/**
+ * Only useful for volume controllers.
+ * @hide
+ */
+ public void setVolumePolicy(VolumePolicy policy) {
+ try {
+ getService().setVolumePolicy(policy);
+ } catch (RemoteException e) {
+ Log.w(TAG, "Error calling setVolumePolicy", e);
+ }
+ }
+
+ /**
* Set Hdmi Cec system audio mode.
*
* @param on whether to be on system audio mode
@@ -3495,8 +3407,18 @@ public class AudioManager {
* @param ports An AudioPort ArrayList where the list will be returned.
* @hide
*/
- public int listAudioPorts(ArrayList<AudioPort> ports) {
- return updateAudioPortCache(ports, null);
+ public static int listAudioPorts(ArrayList<AudioPort> ports) {
+ return updateAudioPortCache(ports, null, null);
+ }
+
+ /**
+ * Returns a list of descriptors for all audio ports managed by the audio framework as
+ * it was before the last update callback.
+ * @param ports An AudioPort ArrayList where the list will be returned.
+ * @hide
+ */
+ public static int listPreviousAudioPorts(ArrayList<AudioPort> ports) {
+ return updateAudioPortCache(null, null, ports);
}
/**
@@ -3504,21 +3426,46 @@ public class AudioManager {
* @see listAudioPorts(ArrayList<AudioPort>)
* @hide
*/
- public int listAudioDevicePorts(ArrayList<AudioPort> devices) {
+ public static int listAudioDevicePorts(ArrayList<AudioDevicePort> devices) {
+ if (devices == null) {
+ return ERROR_BAD_VALUE;
+ }
ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
- int status = updateAudioPortCache(ports, null);
+ int status = updateAudioPortCache(ports, null, null);
if (status == SUCCESS) {
- devices.clear();
- for (int i = 0; i < ports.size(); i++) {
- if (ports.get(i) instanceof AudioDevicePort) {
- devices.add(ports.get(i));
- }
- }
+ filterDevicePorts(ports, devices);
}
return status;
}
/**
+ * Specialized version of listPreviousAudioPorts() listing only audio devices (AudioDevicePort)
+ * @see listPreviousAudioPorts(ArrayList<AudioPort>)
+ * @hide
+ */
+ public static int listPreviousAudioDevicePorts(ArrayList<AudioDevicePort> devices) {
+ if (devices == null) {
+ return ERROR_BAD_VALUE;
+ }
+ ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
+ int status = updateAudioPortCache(null, null, ports);
+ if (status == SUCCESS) {
+ filterDevicePorts(ports, devices);
+ }
+ return status;
+ }
+
+ private static void filterDevicePorts(ArrayList<AudioPort> ports,
+ ArrayList<AudioDevicePort> devices) {
+ devices.clear();
+ for (int i = 0; i < ports.size(); i++) {
+ if (ports.get(i) instanceof AudioDevicePort) {
+ devices.add((AudioDevicePort)ports.get(i));
+ }
+ }
+ }
+
+ /**
* Create a connection between two or more devices. The framework will reject the request if
* device types are not compatible or the implementation does not support the requested
* configuration.
@@ -3541,7 +3488,7 @@ public class AudioManager {
* patch[0] contains the newly created patch
* @hide
*/
- public int createAudioPatch(AudioPatch[] patch,
+ public static int createAudioPatch(AudioPatch[] patch,
AudioPortConfig[] sources,
AudioPortConfig[] sinks) {
return AudioSystem.createAudioPatch(patch, sources, sinks);
@@ -3558,7 +3505,7 @@ public class AudioManager {
* - {@link #ERROR} if patch cannot be released for any other reason.
* @hide
*/
- public int releaseAudioPatch(AudioPatch patch) {
+ public static int releaseAudioPatch(AudioPatch patch) {
return AudioSystem.releaseAudioPatch(patch);
}
@@ -3567,8 +3514,8 @@ public class AudioManager {
* @param patches An AudioPatch array where the list will be returned.
* @hide
*/
- public int listAudioPatches(ArrayList<AudioPatch> patches) {
- return updateAudioPortCache(null, patches);
+ public static int listAudioPatches(ArrayList<AudioPatch> patches) {
+ return updateAudioPortCache(null, patches, null);
}
/**
@@ -3576,7 +3523,7 @@ public class AudioManager {
* AudioGain.buildConfig()
* @hide
*/
- public int setAudioPortGain(AudioPort port, AudioGainConfig gain) {
+ public static int setAudioPortGain(AudioPort port, AudioGainConfig gain) {
if (port == null || gain == null) {
return ERROR_BAD_VALUE;
}
@@ -3634,6 +3581,7 @@ public class AudioManager {
static final int AUDIOPORT_GENERATION_INIT = 0;
static Integer sAudioPortGeneration = new Integer(AUDIOPORT_GENERATION_INIT);
static ArrayList<AudioPort> sAudioPortsCached = new ArrayList<AudioPort>();
+ static ArrayList<AudioPort> sPreviousAudioPortsCached = new ArrayList<AudioPort>();
static ArrayList<AudioPatch> sAudioPatchesCached = new ArrayList<AudioPatch>();
static int resetAudioPortGeneration() {
@@ -3645,7 +3593,8 @@ public class AudioManager {
return generation;
}
- static int updateAudioPortCache(ArrayList<AudioPort> ports, ArrayList<AudioPatch> patches) {
+ static int updateAudioPortCache(ArrayList<AudioPort> ports, ArrayList<AudioPatch> patches,
+ ArrayList<AudioPort> previousPorts) {
synchronized (sAudioPortGeneration) {
if (sAudioPortGeneration == AUDIOPORT_GENERATION_INIT) {
@@ -3704,6 +3653,7 @@ public class AudioManager {
}
}
+ sPreviousAudioPortsCached = sAudioPortsCached;
sAudioPortsCached = newPorts;
sAudioPatchesCached = newPatches;
sAudioPortGeneration = portGeneration[0];
@@ -3716,6 +3666,10 @@ public class AudioManager {
patches.clear();
patches.addAll(sAudioPatchesCached);
}
+ if (previousPorts != null) {
+ previousPorts.clear();
+ previousPorts.addAll(sPreviousAudioPortsCached);
+ }
}
return SUCCESS;
}
@@ -3749,4 +3703,303 @@ public class AudioManager {
portCfg.format(),
gainCfg);
}
+
+ private OnAmPortUpdateListener mPortListener = null;
+
+ /**
+ * The message sent to apps when the contents of the device list changes if they provide
+ * a {@link Handler} object to addOnAudioDeviceConnectionListener().
+ */
+ private final static int MSG_DEVICES_DEVICES_ADDED = 0;
+ private final static int MSG_DEVICES_DEVICES_REMOVED = 1;
+
+ /**
+ * The list of {@link AudioDeviceCallback} objects to receive add/remove notifications.
+ */
+ private ArrayMap<AudioDeviceCallback, NativeEventHandlerDelegate>
+ mDeviceCallbacks =
+ new ArrayMap<AudioDeviceCallback, NativeEventHandlerDelegate>();
+
+ /**
+ * The following are flags to allow users of {@link AudioManager#getDevices(int)} to filter
+ * the results list to only those device types they are interested in.
+ */
+ /**
+ * Specifies to the {@link AudioManager#getDevices(int)} method to include
+ * source (i.e. input) audio devices.
+ */
+ public static final int GET_DEVICES_INPUTS = 0x0001;
+
+ /**
+ * Specifies to the {@link AudioManager#getDevices(int)} method to include
+ * sink (i.e. output) audio devices.
+ */
+ public static final int GET_DEVICES_OUTPUTS = 0x0002;
+
+ /**
+ * Specifies to the {@link AudioManager#getDevices(int)} method to include both
+ * source and sink devices.
+ */
+ public static final int GET_DEVICES_ALL = GET_DEVICES_OUTPUTS | GET_DEVICES_INPUTS;
+
+ /**
+ * Determines if a given AudioDevicePort meets the specified filter criteria.
+ * @param port The port to test.
+ * @param flags A set of bitflags specifying the criteria to test.
+ * @see {@link GET_DEVICES_OUTPUTS} and {@link GET_DEVICES_INPUTS}
+ **/
+ private static boolean checkFlags(AudioDevicePort port, int flags) {
+ return port.role() == AudioPort.ROLE_SINK && (flags & GET_DEVICES_OUTPUTS) != 0 ||
+ port.role() == AudioPort.ROLE_SOURCE && (flags & GET_DEVICES_INPUTS) != 0;
+ }
+
+ /**
+ * Returns an array of {@link AudioDeviceInfo} objects corresponding to the audio devices
+ * currently connected to the system and meeting the criteria specified in the
+ * <code>flags</code> parameter.
+ * @param flags A set of bitflags specifying the criteria to test.
+ * @see {@link GET_DEVICES_OUTPUTS}, {@link GET_DEVICES_INPUTS} and {@link GET_DEVICES_ALL}.
+ * @return A (possibly zero-length) array of AudioDeviceInfo objects.
+ */
+ public AudioDeviceInfo[] getDevices(int flags) {
+ return getDevicesStatic(flags);
+ }
+
+ /**
+ * Does the actual computation to generate an array of (externally-visible) AudioDeviceInfo
+ * objects from the current (internal) AudioDevicePort list.
+ */
+ private static AudioDeviceInfo[]
+ infoListFromPortList(ArrayList<AudioDevicePort> ports, int flags) {
+
+ // figure out how many AudioDeviceInfo we need space for...
+ int numRecs = 0;
+ for (AudioDevicePort port : ports) {
+ if (checkFlags(port, flags)) {
+ numRecs++;
+ }
+ }
+
+ // Now load them up...
+ AudioDeviceInfo[] deviceList = new AudioDeviceInfo[numRecs];
+ int slot = 0;
+ for (AudioDevicePort port : ports) {
+ if (checkFlags(port, flags)) {
+ deviceList[slot++] = new AudioDeviceInfo(port);
+ }
+ }
+
+ return deviceList;
+ }
+
+ /*
+ * Calculate the list of ports that are in ports_B, but not in ports_A. This is used by
+ * the add/remove callback mechanism to provide a list of the newly added or removed devices
+ * rather than the whole list and make the app figure it out.
+ * Note that calling this method with:
+ * ports_A == PREVIOUS_ports and ports_B == CURRENT_ports will calculate the ADDED ports.
+ * ports_A == CURRENT_ports and ports_B == PREVIOUS_ports will calculate the REMOVED ports.
+ */
+ private static AudioDeviceInfo[] calcListDeltas(
+ ArrayList<AudioDevicePort> ports_A, ArrayList<AudioDevicePort> ports_B, int flags) {
+
+ ArrayList<AudioDevicePort> delta_ports = new ArrayList<AudioDevicePort>();
+
+ AudioDevicePort cur_port = null;
+ for (int cur_index = 0; cur_index < ports_B.size(); cur_index++) {
+ boolean cur_port_found = false;
+ cur_port = ports_B.get(cur_index);
+ for (int prev_index = 0;
+ prev_index < ports_A.size() && !cur_port_found;
+ prev_index++) {
+ cur_port_found = (cur_port.id() == ports_A.get(prev_index).id());
+ }
+
+ if (!cur_port_found) {
+ delta_ports.add(cur_port);
+ }
+ }
+
+ return infoListFromPortList(delta_ports, flags);
+ }
+
+ /**
+ * Generates a list of AudioDeviceInfo objects corresponding to the audio devices currently
+ * connected to the system and meeting the criteria specified in the <code>flags</code>
+ * parameter.
+ * This is an internal function. The public API front is getDevices(int).
+ * @param flags A set of bitflags specifying the criteria to test.
+ * @see {@link GET_DEVICES_OUTPUTS}, {@link GET_DEVICES_INPUTS} and {@link GET_DEVICES_ALL}.
+ * @return A (possibly zero-length) array of AudioDeviceInfo objects.
+ * @hide
+ */
+ public static AudioDeviceInfo[] getDevicesStatic(int flags) {
+ ArrayList<AudioDevicePort> ports = new ArrayList<AudioDevicePort>();
+ int status = AudioManager.listAudioDevicePorts(ports);
+ if (status != AudioManager.SUCCESS) {
+ // fail and bail!
+ return new AudioDeviceInfo[0]; // Always return an array.
+ }
+
+ return infoListFromPortList(ports, flags);
+ }
+
+ /**
+ * Registers an {@link AudioDeviceCallback} object to receive notifications of changes
+ * to the set of connected audio devices.
+ * @param callback The {@link AudioDeviceCallback} object to receive connect/disconnect
+ * notifications.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the {@link Handler} associated with the main
+ * {@link Looper} will be used.
+ */
+ public void registerAudioDeviceCallback(AudioDeviceCallback callback,
+ android.os.Handler handler) {
+ if (callback != null && !mDeviceCallbacks.containsKey(callback)) {
+ synchronized (mDeviceCallbacks) {
+ mDeviceCallbacks.put(
+ callback, new NativeEventHandlerDelegate(callback, handler));
+ }
+ }
+ }
+
+ /**
+ * Unregisters an {@link AudioDeviceCallback} object which has been previously registered
+ * to receive notifications of changes to the set of connected audio devices.
+ * @param callback The {@link AudioDeviceCallback} object that was previously registered
+ * with {@link AudioManager#registerAudioDeviceCallback} to be unregistered.
+ */
+ public void unregisterAudioDeviceCallback(AudioDeviceCallback callback) {
+ synchronized (mDeviceCallbacks) {
+ if (mDeviceCallbacks.containsKey(callback)) {
+ mDeviceCallbacks.remove(callback);
+ }
+ }
+ }
+
+ /**
+ * Internal method to compute and generate add/remove messages and then send to any
+ * registered callbacks.
+ */
+ private void broadcastDeviceListChange() {
+ int status;
+
+ ArrayList<AudioDevicePort> previous_ports = new ArrayList<AudioDevicePort>();
+ status = AudioManager.listPreviousAudioDevicePorts(previous_ports);
+ if (status != AudioManager.SUCCESS) {
+ return;
+ }
+
+ ArrayList<AudioDevicePort> current_ports = new ArrayList<AudioDevicePort>();
+ status = AudioManager.listAudioDevicePorts(current_ports);
+ if (status != AudioManager.SUCCESS) {
+ return;
+ }
+
+ AudioDeviceInfo[] added_devices =
+ calcListDeltas(previous_ports, current_ports, GET_DEVICES_ALL);
+ AudioDeviceInfo[] removed_devices =
+ calcListDeltas(current_ports, previous_ports, GET_DEVICES_ALL);
+
+ if (added_devices.length != 0 || removed_devices.length != 0) {
+ Collection<NativeEventHandlerDelegate> values;
+ synchronized (mDeviceCallbacks) {
+ values = mDeviceCallbacks.values();
+ }
+ for (NativeEventHandlerDelegate delegate : values) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ if (added_devices.length != 0) {
+ handler.sendMessage(
+ Message.obtain(handler,MSG_DEVICES_DEVICES_ADDED, added_devices));
+ }
+ if (removed_devices.length != 0) {
+ handler.sendMessage(
+ Message.obtain(handler,MSG_DEVICES_DEVICES_REMOVED, removed_devices));
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Handles Port list update notifications from the AudioManager
+ */
+ private class OnAmPortUpdateListener implements AudioManager.OnAudioPortUpdateListener {
+ static final String TAG = "OnAmPortUpdateListener";
+ public void onAudioPortListUpdate(AudioPort[] portList) {
+ broadcastDeviceListChange();
+ }
+
+ /**
+ * Callback method called upon audio patch list update.
+ * Note: We don't do anything with Patches at this time, so ignore this notification.
+ * @param patchList the updated list of audio patches.
+ */
+ public void onAudioPatchListUpdate(AudioPatch[] patchList) {}
+
+ /**
+ * Callback method called when the mediaserver dies
+ */
+ public void onServiceDied() {
+ broadcastDeviceListChange();
+ }
+ }
+
+ //---------------------------------------------------------
+ // Inner classes
+ //--------------------
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread.
+ */
+ private class NativeEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativeEventHandlerDelegate(final AudioDeviceCallback callback,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no handler given, fall back to the main looper for delivering callbacks
+ looper = Looper.getMainLooper();
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case MSG_DEVICES_DEVICES_ADDED:
+ // call the OnAudioDeviceConnectionListener
+ if (callback != null) {
+ callback.onAudioDevicesAdded((AudioDeviceInfo[])msg.obj);
+ }
+ break;
+
+ case MSG_DEVICES_DEVICES_REMOVED:
+ if (callback != null) {
+ callback.onAudioDevicesRemoved((AudioDeviceInfo[])msg.obj);
+ }
+ break;
+
+ default:
+ Log.e(TAG, "Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
+ }
+ }
}
diff --git a/media/java/android/media/AudioManagerInternal.java b/media/java/android/media/AudioManagerInternal.java
index 616bdd1..ac59ace 100644
--- a/media/java/android/media/AudioManagerInternal.java
+++ b/media/java/android/media/AudioManagerInternal.java
@@ -15,8 +15,6 @@
*/
package android.media;
-import android.os.IBinder;
-
import com.android.server.LocalServices;
/**
@@ -29,8 +27,7 @@ import com.android.server.LocalServices;
public abstract class AudioManagerInternal {
public abstract void adjustSuggestedStreamVolumeForUid(int streamType, int direction,
- int flags,
- String callingPackage, int uid);
+ int flags, String callingPackage, int uid);
public abstract void adjustStreamVolumeForUid(int streamType, int direction, int flags,
String callingPackage, int uid);
@@ -38,25 +35,27 @@ public abstract class AudioManagerInternal {
public abstract void setStreamVolumeForUid(int streamType, int direction, int flags,
String callingPackage, int uid);
- public abstract void adjustMasterVolumeForUid(int steps, int flags, String callingPackage,
- int uid);
-
- public abstract void setMasterMuteForUid(boolean state, int flags, String callingPackage,
- IBinder cb, int uid);
-
public abstract void setRingerModeDelegate(RingerModeDelegate delegate);
public abstract int getRingerModeInternal();
public abstract void setRingerModeInternal(int ringerMode, String caller);
+ public abstract int getVolumeControllerUid();
+
+ public abstract void updateRingerModeAffectedStreamsInternal();
+
public interface RingerModeDelegate {
/** Called when external ringer mode is evaluated, returns the new internal ringer mode */
int onSetRingerModeExternal(int ringerModeOld, int ringerModeNew, String caller,
- int ringerModeInternal);
+ int ringerModeInternal, VolumePolicy policy);
/** Called when internal ringer mode is evaluated, returns the new external ringer mode */
int onSetRingerModeInternal(int ringerModeOld, int ringerModeNew, String caller,
- int ringerModeExternal);
+ int ringerModeExternal, VolumePolicy policy);
+
+ boolean canVolumeDownEnterSilent();
+
+ int getRingerModeAffectedStreams(int streams);
}
}
diff --git a/media/java/android/media/AudioMixPort.java b/media/java/android/media/AudioMixPort.java
index 1500a43..ab55c8d 100644
--- a/media/java/android/media/AudioMixPort.java
+++ b/media/java/android/media/AudioMixPort.java
@@ -20,15 +20,21 @@ package android.media;
* The AudioMixPort is a specialized type of AudioPort
* describing an audio mix or stream at an input or output stream of the audio
* framework.
+ * In addition to base audio port attributes, the mix descriptor contains:
+ * - the unique audio I/O handle assigned by AudioFlinger to this mix.
* @see AudioPort
* @hide
*/
public class AudioMixPort extends AudioPort {
- AudioMixPort(AudioHandle handle, int role, int[] samplingRates, int[] channelMasks,
+ private final int mIoHandle;
+
+ AudioMixPort(AudioHandle handle, int ioHandle, int role, String deviceName,
+ int[] samplingRates, int[] channelMasks,
int[] formats, AudioGain[] gains) {
- super(handle, role, samplingRates, channelMasks, formats, gains);
+ super(handle, role, deviceName, samplingRates, channelMasks, formats, gains);
+ mIoHandle = ioHandle;
}
/**
@@ -40,11 +46,23 @@ public class AudioMixPort extends AudioPort {
return new AudioMixPortConfig(this, samplingRate, channelMask, format, gain);
}
+ /**
+ * Get the unique audio I/O handle assigned by AudioFlinger to this mix.
+ */
+ public int ioHandle() {
+ return mIoHandle;
+ }
+
@Override
public boolean equals(Object o) {
if (o == null || !(o instanceof AudioMixPort)) {
return false;
}
+ AudioMixPort other = (AudioMixPort)o;
+ if (mIoHandle != other.ioHandle()) {
+ return false;
+ }
+
return super.equals(o);
}
diff --git a/media/java/android/media/AudioPort.java b/media/java/android/media/AudioPort.java
index 1ab7e89..7328d7a 100644
--- a/media/java/android/media/AudioPort.java
+++ b/media/java/android/media/AudioPort.java
@@ -16,7 +16,6 @@
package android.media;
-
/**
* An audio port is a node of the audio framework or hardware that can be connected to or
* disconnect from another audio node to create a specific audio routing configuration.
@@ -37,6 +36,7 @@ package android.media;
* @hide
*/
public class AudioPort {
+ private static final String TAG = "AudioPort";
/**
* For use by the audio framework.
@@ -68,16 +68,20 @@ public class AudioPort {
AudioHandle mHandle;
protected final int mRole;
+ private final String mName;
private final int[] mSamplingRates;
private final int[] mChannelMasks;
private final int[] mFormats;
private final AudioGain[] mGains;
private AudioPortConfig mActiveConfig;
- AudioPort(AudioHandle handle, int role, int[] samplingRates, int[] channelMasks,
+ AudioPort(AudioHandle handle, int role, String name,
+ int[] samplingRates, int[] channelMasks,
int[] formats, AudioGain[] gains) {
+
mHandle = handle;
mRole = role;
+ mName = name;
mSamplingRates = samplingRates;
mChannelMasks = channelMasks;
mFormats = formats;
@@ -89,6 +93,14 @@ public class AudioPort {
}
/**
+ * Get the system unique device ID.
+ */
+ public int id() {
+ return mHandle.id();
+ }
+
+
+ /**
* Get the audio port role
*/
public int role() {
@@ -96,6 +108,14 @@ public class AudioPort {
}
/**
+ * Get the human-readable name of this port. Perhaps an internal
+ * designation or a physical device.
+ */
+ public String name() {
+ return mName;
+ }
+
+ /**
* Get the list of supported sampling rates
* Empty array if sampling rate is not relevant for this audio port
*/
diff --git a/media/java/android/media/AudioPortEventHandler.java b/media/java/android/media/AudioPortEventHandler.java
index ba2a59d..c152245 100644
--- a/media/java/android/media/AudioPortEventHandler.java
+++ b/media/java/android/media/AudioPortEventHandler.java
@@ -19,8 +19,6 @@ package android.media;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
-import android.util.Log;
-
import java.util.ArrayList;
import java.lang.ref.WeakReference;
@@ -42,6 +40,12 @@ class AudioPortEventHandler {
private static final int AUDIOPORT_EVENT_SERVICE_DIED = 3;
private static final int AUDIOPORT_EVENT_NEW_LISTENER = 4;
+ /**
+ * Accessed by native methods: JNI Callback context.
+ */
+ @SuppressWarnings("unused")
+ private long mJniCallback;
+
void init() {
synchronized (this) {
if (mHandler != null) {
@@ -65,9 +69,6 @@ class AudioPortEventHandler {
listeners = mListeners;
}
}
- if (listeners.isEmpty()) {
- return;
- }
// reset audio port cache if the event corresponds to a change coming
// from audio policy service or if mediaserver process died.
if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED ||
@@ -75,10 +76,15 @@ class AudioPortEventHandler {
msg.what == AUDIOPORT_EVENT_SERVICE_DIED) {
AudioManager.resetAudioPortGeneration();
}
+
+ if (listeners.isEmpty()) {
+ return;
+ }
+
ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
ArrayList<AudioPatch> patches = new ArrayList<AudioPatch>();
if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
- int status = AudioManager.updateAudioPortCache(ports, patches);
+ int status = AudioManager.updateAudioPortCache(ports, patches, null);
if (status != AudioManager.SUCCESS) {
return;
}
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index de10ef9..7eb1357 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -16,10 +16,17 @@
package android.media;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
+import java.util.Collection;
import java.util.Iterator;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.app.ActivityThread;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
@@ -27,6 +34,7 @@ import android.os.Looper;
import android.os.Message;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.util.ArrayMap;
import android.util.Log;
/**
@@ -48,6 +56,12 @@ public class AudioRecord
//---------------------------------------------------------
// Constants
//--------------------
+
+ /** Minimum value for sample rate */
+ private static final int SAMPLE_RATE_HZ_MIN = 4000;
+ /** Maximum value for sample rate */
+ private static final int SAMPLE_RATE_HZ_MAX = 192000;
+
/**
* indicates AudioRecord state is not successfully initialized.
*/
@@ -107,6 +121,26 @@ public class AudioRecord
/** @hide */
public final static String SUBMIX_FIXED_VOLUME = "fixedVolume";
+ /** @hide */
+ @IntDef({
+ READ_BLOCKING,
+ READ_NON_BLOCKING
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ReadMode {}
+
+ /**
+ * The read mode indicating the read operation will block until all data
+ * requested has been read.
+ */
+ public final static int READ_BLOCKING = 0;
+
+ /**
+ * The read mode indicating the read operation will return immediately after
+ * reading as much audio data as possible without blocking.
+ */
+ public final static int READ_NON_BLOCKING = 1;
+
//---------------------------------------------------------
// Used exclusively by native code
//--------------------
@@ -122,6 +156,12 @@ public class AudioRecord
@SuppressWarnings("unused")
private long mNativeCallbackCookie;
+ /**
+ * Accessed by native methods: provides access to the JNIDeviceCallback instance.
+ */
+ @SuppressWarnings("unused")
+ private long mNativeDeviceCallback;
+
//---------------------------------------------------------
// Member variables
@@ -135,13 +175,18 @@ public class AudioRecord
*/
private int mChannelCount;
/**
- * The audio channel mask
+ * The audio channel position mask
*/
private int mChannelMask;
/**
+ * The audio channel index mask
+ */
+ private int mChannelIndexMask;
+ /**
* The encoding of the audio samples.
* @see AudioFormat#ENCODING_PCM_8BIT
* @see AudioFormat#ENCODING_PCM_16BIT
+ * @see AudioFormat#ENCODING_PCM_FLOAT
*/
private int mAudioFormat;
/**
@@ -201,8 +246,8 @@ public class AudioRecord
* Though some invalid parameters will result in an {@link IllegalArgumentException} exception,
* other errors do not. Thus you should call {@link #getState()} immediately after construction
* to confirm that the object is usable.
- * @param audioSource the recording source (also referred to as capture preset).
- * See {@link MediaRecorder.AudioSource} for the capture preset definitions.
+ * @param audioSource the recording source.
+ * See {@link MediaRecorder.AudioSource} for the recording source definitions.
* @param sampleRateInHz the sample rate expressed in Hertz. 44100Hz is currently the only
* rate that is guaranteed to work on all devices, but other rates such as 22050,
* 16000, and 11025 may work on some devices.
@@ -210,9 +255,9 @@ public class AudioRecord
* See {@link AudioFormat#CHANNEL_IN_MONO} and
* {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed
* to work on all devices.
- * @param audioFormat the format in which the audio data is represented.
- * See {@link AudioFormat#ENCODING_PCM_16BIT} and
- * {@link AudioFormat#ENCODING_PCM_8BIT}
+ * @param audioFormat the format in which the audio data is to be returned.
+ * See {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT},
+ * and {@link AudioFormat#ENCODING_PCM_FLOAT}.
* @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
* to during the recording. New audio data can be read from this buffer in smaller chunks
* than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
@@ -238,11 +283,10 @@ public class AudioRecord
/**
* @hide
- * CANDIDATE FOR PUBLIC API
* Class constructor with {@link AudioAttributes} and {@link AudioFormat}.
* @param attributes a non-null {@link AudioAttributes} instance. Use
- * {@link AudioAttributes.Builder#setCapturePreset(int)} for configuring the capture
- * preset for this instance.
+ * {@link AudioAttributes.Builder#setAudioSource(int)} for configuring the audio
+ * source for this instance.
* @param format a non-null {@link AudioFormat} instance describing the format of the data
* that will be recorded through this AudioRecord. See {@link AudioFormat.Builder} for
* configuring the audio format parameters such as encoding, channel mask and sample rate.
@@ -257,6 +301,7 @@ public class AudioRecord
* construction.
* @throws IllegalArgumentException
*/
+ @SystemApi
public AudioRecord(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
int sessionId) throws IllegalArgumentException {
mRecordingState = RECORDSTATE_STOPPED;
@@ -312,8 +357,19 @@ public class AudioRecord
audioParamCheck(attributes.getCapturePreset(), rate, encoding);
- mChannelCount = AudioFormat.channelCountFromInChannelMask(format.getChannelMask());
- mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
+ mChannelIndexMask = format.getChannelIndexMask();
+ mChannelCount = format.getChannelCount();
+ }
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
+ mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
+ mChannelCount = format.getChannelCount();
+ } else if (mChannelIndexMask == 0) {
+ mChannelMask = getChannelMaskFromLegacyConfig(AudioFormat.CHANNEL_IN_DEFAULT, false);
+ mChannelCount = AudioFormat.channelCountFromInChannelMask(mChannelMask);
+ }
audioBuffSizeCheck(bufferSizeInBytes);
@@ -322,8 +378,9 @@ public class AudioRecord
//TODO: update native initialization when information about hardware init failure
// due to capture device already open is available.
int initResult = native_setup( new WeakReference<AudioRecord>(this),
- mAudioAttributes, mSampleRate, mChannelMask, mAudioFormat, mNativeBufferSizeInBytes,
- session);
+ mAudioAttributes, mSampleRate, mChannelMask, mChannelIndexMask,
+ mAudioFormat, mNativeBufferSizeInBytes,
+ session, ActivityThread.currentOpPackageName());
if (initResult != SUCCESS) {
loge("Error code "+initResult+" when initializing native AudioRecord object.");
return; // with mState == STATE_UNINITIALIZED
@@ -334,6 +391,186 @@ public class AudioRecord
mState = STATE_INITIALIZED;
}
+ /**
+ * Builder class for {@link AudioRecord} objects.
+ * Use this class to configure and create an <code>AudioRecord</code> instance. By setting the
+ * recording source and audio format parameters, you indicate which of
+ * those vary from the default behavior on the device.
+ * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
+ * parameters, to be used by a new <code>AudioRecord</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioRecord recorder = new AudioRecord.Builder()
+ * .setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
+ * .setAudioFormat(new AudioFormat.Builder()
+ * .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ * .setSampleRate(32000)
+ * .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+ * .build())
+ *         .setBufferSizeInBytes(2*minBuffSize)
+ * .build();
+ * </pre>
+ * <p>
+ * If the audio source is not set with {@link #setAudioSource(int)},
+ * {@link MediaRecorder.AudioSource#DEFAULT} is used.
+ * <br>If the audio format is not specified or is incomplete, its sample rate will be the
+ * default output sample rate of the device (see
+ * {@link AudioManager#PROPERTY_OUTPUT_SAMPLE_RATE}), its channel configuration will be
+ * {@link AudioFormat#CHANNEL_IN_MONO}, and the encoding will be
+ * {@link AudioFormat#ENCODING_PCM_16BIT}.
+ * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
+ * the minimum buffer size for the source is used.
+ */
+ public static class Builder {
+ private AudioAttributes mAttributes;
+ private AudioFormat mFormat;
+ private int mBufferSizeInBytes;
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+
+ /**
+ * Constructs a new Builder with the default values as described above.
+ */
+ public Builder() {
+ }
+
+ /**
+ * @param source the audio source.
+ * See {@link MediaRecorder.AudioSource} for the supported audio source definitions.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setAudioSource(int source) throws IllegalArgumentException {
+ if ( (source < MediaRecorder.AudioSource.DEFAULT) ||
+ (source > MediaRecorder.getAudioSourceMax()) ) {
+ throw new IllegalArgumentException("Invalid audio source " + source);
+ }
+ mAttributes = new AudioAttributes.Builder()
+ .setInternalCapturePreset(source)
+ .build();
+ return this;
+ }
+
+ /**
+ * @hide
+ * To be only used by system components. Allows specifying non-public capture presets
+ * @param attributes a non-null {@link AudioAttributes} instance that contains the capture
+ * preset to be used.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setAudioAttributes(@NonNull AudioAttributes attributes)
+ throws IllegalArgumentException {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+ }
+ if (attributes.getCapturePreset() == MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID) {
+ throw new IllegalArgumentException(
+ "No valid capture preset in AudioAttributes argument");
+ }
+ // keep reference, we only copy the data when building
+ mAttributes = attributes;
+ return this;
+ }
+
+ /**
+ * Sets the format of the audio data to be captured.
+ * @param format a non-null {@link AudioFormat} instance
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setAudioFormat(@NonNull AudioFormat format) throws IllegalArgumentException {
+ if (format == null) {
+ throw new IllegalArgumentException("Illegal null AudioFormat argument");
+ }
+ // keep reference, we only copy the data when building
+ mFormat = format;
+ return this;
+ }
+
+ /**
+ * Sets the total size (in bytes) of the buffer where audio data is written
+ * during the recording. New audio data can be read from this buffer in smaller chunks
+ * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
+ * required buffer size for the successful creation of an AudioRecord instance.
+ * Since bufferSizeInBytes may be internally increased to accommodate the source
+ * requirements, use {@link #getBufferSizeInFrames()} to determine the actual buffer size
+ * in frames.
+ * @param bufferSizeInBytes a value strictly greater than 0
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setBufferSizeInBytes(int bufferSizeInBytes) throws IllegalArgumentException {
+ if (bufferSizeInBytes <= 0) {
+ throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
+ }
+ mBufferSizeInBytes = bufferSizeInBytes;
+ return this;
+ }
+
+ /**
+ * @hide
+ * To be only used by system components.
+ * @param sessionId ID of audio session the AudioRecord must be attached to, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at
+ * construction time.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setSessionId(int sessionId) throws IllegalArgumentException {
+ if (sessionId < 0) {
+ throw new IllegalArgumentException("Invalid session ID " + sessionId);
+ }
+ mSessionId = sessionId;
+ return this;
+ }
+
+ /**
+ * @return a new {@link AudioRecord} instance initialized with all the parameters set
+ * on this <code>Builder</code>
+ * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
+ * were incompatible, or if they are not supported by the device.
+ */
+ public AudioRecord build() throws UnsupportedOperationException {
+ if (mFormat == null) {
+ mFormat = new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+ .build();
+ } else {
+ if (mFormat.getEncoding() == AudioFormat.ENCODING_INVALID) {
+ mFormat = new AudioFormat.Builder(mFormat)
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .build();
+ }
+ if (mFormat.getChannelMask() == AudioFormat.CHANNEL_INVALID
+ && mFormat.getChannelIndexMask() == AudioFormat.CHANNEL_INVALID) {
+ mFormat = new AudioFormat.Builder(mFormat)
+ .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+ .build();
+ }
+ }
+ if (mAttributes == null) {
+ mAttributes = new AudioAttributes.Builder()
+ .setInternalCapturePreset(MediaRecorder.AudioSource.DEFAULT)
+ .build();
+ }
+ try {
+ // If the buffer size is not specified,
+ // use a single frame for the buffer size and let the
+ // native code figure out the minimum buffer size.
+ if (mBufferSizeInBytes == 0) {
+ mBufferSizeInBytes = mFormat.getChannelCount()
+ * mFormat.getBytesPerSample(mFormat.getEncoding());
+ }
+ return new AudioRecord(mAttributes, mFormat, mBufferSizeInBytes, mSessionId);
+ } catch (IllegalArgumentException e) {
+ throw new UnsupportedOperationException(e.getMessage());
+ }
+ }
+ }
+
// Convenience method for the constructor's parameter checks.
// This, getChannelMaskFromLegacyConfig and audioBuffSizeCheck are where constructor
// IllegalArgumentException-s are thrown
@@ -376,7 +613,7 @@ public class AudioRecord
// audio source
if ( (audioSource < MediaRecorder.AudioSource.DEFAULT) ||
((audioSource > MediaRecorder.getAudioSourceMax()) &&
- (audioSource != MediaRecorder.AudioSource.FM_TUNER) &&
+ (audioSource != MediaRecorder.AudioSource.RADIO_TUNER) &&
(audioSource != MediaRecorder.AudioSource.HOTWORD)) ) {
throw new IllegalArgumentException("Invalid audio source.");
}
@@ -384,7 +621,7 @@ public class AudioRecord
//--------------
// sample rate
- if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) {
+ if ((sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX)) {
throw new IllegalArgumentException(sampleRateInHz
+ "Hz is not a supported sample rate.");
}
@@ -396,13 +633,14 @@ public class AudioRecord
case AudioFormat.ENCODING_DEFAULT:
mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
break;
+ case AudioFormat.ENCODING_PCM_FLOAT:
case AudioFormat.ENCODING_PCM_16BIT:
case AudioFormat.ENCODING_PCM_8BIT:
mAudioFormat = audioFormat;
break;
default:
throw new IllegalArgumentException("Unsupported sample encoding."
- + " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.");
+ + " Should be ENCODING_PCM_8BIT, ENCODING_PCM_16BIT, or ENCODING_PCM_FLOAT.");
}
}
@@ -410,7 +648,8 @@ public class AudioRecord
// Convenience method for the contructor's audio buffer size check.
// preconditions:
// mChannelCount is valid
- // mAudioFormat is AudioFormat.ENCODING_PCM_8BIT OR AudioFormat.ENCODING_PCM_16BIT
+ // mAudioFormat is AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT,
+ // or AudioFormat.ENCODING_PCM_FLOAT
// postcondition:
// mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
private void audioBuffSizeCheck(int audioBufferSize) throws IllegalArgumentException {
@@ -469,23 +708,45 @@ public class AudioRecord
}
/**
- * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
- * and {@link AudioFormat#ENCODING_PCM_8BIT}.
+ * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
+ * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
*/
public int getAudioFormat() {
return mAudioFormat;
}
/**
- * Returns the configured channel configuration.
- * See {@link AudioFormat#CHANNEL_IN_MONO}
+ * Returns the configured channel position mask.
+ * <p> See {@link AudioFormat#CHANNEL_IN_MONO}
* and {@link AudioFormat#CHANNEL_IN_STEREO}.
+ * This method may return {@link AudioFormat#CHANNEL_INVALID} if
+ * a channel index mask is used.
+ * Consider {@link #getFormat()} instead, to obtain an {@link AudioFormat},
+ * which contains both the channel position mask and the channel index mask.
*/
public int getChannelConfiguration() {
return mChannelMask;
}
/**
+ * Returns the configured <code>AudioRecord</code> format.
+ * @return an {@link AudioFormat} containing the
+ * <code>AudioRecord</code> parameters at the time of configuration.
+ */
+ public @NonNull AudioFormat getFormat() {
+ AudioFormat.Builder builder = new AudioFormat.Builder()
+ .setSampleRate(mSampleRate)
+ .setEncoding(mAudioFormat);
+ if (mChannelMask != AudioFormat.CHANNEL_INVALID) {
+ builder.setChannelMask(mChannelMask);
+ }
+ if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
+ builder.setChannelIndexMask(mChannelIndexMask);
+ }
+ return builder.build();
+ }
+
+ /**
* Returns the configured number of channels.
*/
public int getChannelCount() {
@@ -516,6 +777,20 @@ public class AudioRecord
}
/**
+ * Returns the frame count of the native <code>AudioRecord</code> buffer.
+ * This is greater than or equal to the bufferSizeInBytes converted to frame units
+ * specified in the <code>AudioRecord</code> constructor or Builder.
+ * The native frame count may be enlarged to accommodate the requirements of the
+ * source on creation or if the <code>AudioRecord</code>
+ * is subsequently rerouted.
+ * @return current size in frames of the <code>AudioRecord</code> buffer.
+ * @throws IllegalStateException
+ */
+ public int getBufferSizeInFrames() {
+ return native_get_buffer_size_in_frames();
+ }
+
+ /**
* Returns the notification marker position expressed in frames.
*/
public int getNotificationMarkerPosition() {
@@ -569,12 +844,6 @@ public class AudioRecord
return ERROR_BAD_VALUE;
}
- // PCM_8BIT is not supported at the moment
- if (audioFormat != AudioFormat.ENCODING_PCM_16BIT) {
- loge("getMinBufferSize(): Invalid audio format.");
- return ERROR_BAD_VALUE;
- }
-
int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
if (size == 0) {
return ERROR_BAD_VALUE;
@@ -678,80 +947,217 @@ public class AudioRecord
// Audio data supply
//--------------------
/**
- * Reads audio data from the audio hardware for recording into a buffer.
+ * Reads audio data from the audio hardware for recording into a byte array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
* @param audioData the array to which the recorded audio data is written.
* @param offsetInBytes index in audioData from which the data is written expressed in bytes.
* @param sizeInBytes the number of requested bytes.
- * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of bytes will not exceed sizeInBytes.
*/
- public int read(byte[] audioData, int offsetInBytes, int sizeInBytes) {
- if (mState != STATE_INITIALIZED) {
+ public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
+ return read(audioData, offsetInBytes, sizeInBytes, READ_BLOCKING);
+ }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a byte array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInBytes index in audioData from which the data is written expressed in bytes.
+ * @param sizeInBytes the number of requested bytes.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of bytes will not exceed sizeInBytes.
+ */
+ public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
+ @ReadMode int readMode) {
+ if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
|| (offsetInBytes + sizeInBytes < 0) // detect integer overflow
|| (offsetInBytes + sizeInBytes > audioData.length)) {
return ERROR_BAD_VALUE;
}
- return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes);
+ return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes,
+ readMode == READ_BLOCKING);
}
+ /**
+ * Reads audio data from the audio hardware for recording into a short array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInShorts index in audioData from which the data is written expressed in shorts.
+ * @param sizeInShorts the number of requested shorts.
+ * @return the number of shorts that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of shorts will not exceed sizeInShorts.
+ */
+ public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
+ return read(audioData, offsetInShorts, sizeInShorts, READ_BLOCKING);
+ }
/**
- * Reads audio data from the audio hardware for recording into a buffer.
+ * Reads audio data from the audio hardware for recording into a short array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
* @param audioData the array to which the recorded audio data is written.
* @param offsetInShorts index in audioData from which the data is written expressed in shorts.
* @param sizeInShorts the number of requested shorts.
- * @return the number of shorts that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of shorts that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of shorts will not exceed sizeInShorts.
*/
- public int read(short[] audioData, int offsetInShorts, int sizeInShorts) {
- if (mState != STATE_INITIALIZED) {
+ public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
+ @ReadMode int readMode) {
+ if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
|| (offsetInShorts + sizeInShorts < 0) // detect integer overflow
|| (offsetInShorts + sizeInShorts > audioData.length)) {
return ERROR_BAD_VALUE;
}
- return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts);
+ return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts,
+ readMode == READ_BLOCKING);
+ }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a float array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInFloats index in audioData from which the data is written.
+ * @param sizeInFloats the number of requested floats.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of floats that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of floats will not exceed sizeInFloats.
+ */
+ public int read(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
+ @ReadMode int readMode) {
+ if (mState == STATE_UNINITIALIZED) {
+ Log.e(TAG, "AudioRecord.read() called in invalid state STATE_UNINITIALIZED");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
+ Log.e(TAG, "AudioRecord.read(float[] ...) requires format ENCODING_PCM_FLOAT");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
+ if ((audioData == null) || (offsetInFloats < 0) || (sizeInFloats < 0)
+ || (offsetInFloats + sizeInFloats < 0) // detect integer overflow
+ || (offsetInFloats + sizeInFloats > audioData.length)) {
+ return ERROR_BAD_VALUE;
+ }
+
+ return native_read_in_float_array(audioData, offsetInFloats, sizeInFloats,
+ readMode == READ_BLOCKING);
}
+ /**
+ * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
+ * is not a direct buffer, this method will always return 0.
+ * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
+ * unchanged after a call to this method.
+ * The representation of the data in the buffer will depend on the format specified in
+ * the AudioRecord constructor, and will be native endian.
+ * @param audioBuffer the direct buffer to which the recorded audio data is written.
+ * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of bytes will not exceed sizeInBytes.
+ *     The number of bytes read will be truncated to be a multiple of the frame size.
+ */
+ public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes) {
+ return read(audioBuffer, sizeInBytes, READ_BLOCKING);
+ }
/**
* Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
* is not a direct buffer, this method will always return 0.
* Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
* unchanged after a call to this method.
+ * The representation of the data in the buffer will depend on the format specified in
+ * the AudioRecord constructor, and will be native endian.
* @param audioBuffer the direct buffer to which the recorded audio data is written.
- * @param sizeInBytes the number of requested bytes.
- * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of bytes will not exceed sizeInBytes.
+ *     The number of bytes read will be truncated to be a multiple of the frame size.
*/
- public int read(ByteBuffer audioBuffer, int sizeInBytes) {
+ public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes, @ReadMode int readMode) {
if (mState != STATE_INITIALIZED) {
return ERROR_INVALID_OPERATION;
}
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioBuffer == null) || (sizeInBytes < 0) ) {
return ERROR_BAD_VALUE;
}
- return native_read_in_direct_buffer(audioBuffer, sizeInBytes);
+ return native_read_in_direct_buffer(audioBuffer, sizeInBytes, readMode == READ_BLOCKING);
}
-
//--------------------------------------------------------------------------
// Initialization / configuration
//--------------------
@@ -768,7 +1174,7 @@ public class AudioRecord
* Sets the listener the AudioRecord notifies when a previously set marker is reached or
* for each periodic record head position update.
* Use this method to receive AudioRecord events in the Handler associated with another
- * thread than the one in which you created the AudioTrack instance.
+ * thread than the one in which you created the AudioRecord instance.
* @param listener
* @param handler the Handler that will receive the event notification messages.
*/
@@ -809,6 +1215,155 @@ public class AudioRecord
}
+ //--------------------------------------------------------------------------
+ // (Re)Routing Info
+ //--------------------
+ /**
+ * Defines the interface by which applications can receive notifications of routing
+ * changes for the associated {@link AudioRecord}.
+ */
+ public interface OnRoutingChangedListener {
+ /**
+ * Called when the routing of an AudioRecord changes from either an explicit or
+ * policy rerouting. Use {@link #getRoutedDevice()} to retrieve the newly routed-from
+ * device.
+ */
+ public void onRoutingChanged(AudioRecord audioRecord);
+ }
+
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioRecord.
+ * Note: The query is only valid if the AudioRecord is currently recording. If it is not,
+ * <code>getRoutedDevice()</code> will return null.
+ */
+ public AudioDeviceInfo getRoutedDevice() {
+ int deviceId = native_getRoutedDeviceId();
+ if (deviceId == 0) {
+ return null;
+ }
+ AudioDeviceInfo[] devices =
+ AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_INPUTS);
+ for (int i = 0; i < devices.length; i++) {
+ if (devices[i].getId() == deviceId) {
+ return devices[i];
+ }
+ }
+ return null;
+ }
+
+ /**
+ * The list of AudioRecord.OnRoutingChangedListener interface added (with
+ * {@link AudioRecord#addOnRoutingChangedListener(OnRoutingChangedListener,android.os.Handler)}
+ * by an app to receive (re)routing notifications.
+ */
+ private ArrayMap<OnRoutingChangedListener, NativeRoutingEventHandlerDelegate>
+ mRoutingChangeListeners =
+ new ArrayMap<OnRoutingChangedListener, NativeRoutingEventHandlerDelegate>();
+
+ /**
+ * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
+ * on this AudioRecord.
+ * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
+ * of rerouting events.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the {@link Handler} associated with the main
+ * {@link Looper} will be used.
+ */
+ public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
+ android.os.Handler handler) {
+ if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.size() == 0) {
+ native_enableDeviceCallback();
+ }
+ mRoutingChangeListeners.put(
+ listener, new NativeRoutingEventHandlerDelegate(this, listener, handler));
+ }
+ }
+ }
+
+ /**
+ * Removes an {@link OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
+ */
+ public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.containsKey(listener)) {
+ mRoutingChangeListeners.remove(listener);
+ if (mRoutingChangeListeners.size() == 0) {
+ native_disableDeviceCallback();
+ }
+ }
+ }
+ }
+
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread
+ */
+ private class NativeRoutingEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativeRoutingEventHandlerDelegate(final AudioRecord record,
+ final OnRoutingChangedListener listener,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no given handler, use the looper the AudioRecord was created in
+ looper = mInitializationLooper;
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ if (record == null) {
+ return;
+ }
+ switch(msg.what) {
+ case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
+ if (listener != null) {
+ listener.onRoutingChanged(record);
+ }
+ break;
+ default:
+ loge("Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
+ }
+ }
+ /**
+ * Sends device list change notification to all listeners.
+ */
+ private void broadcastRoutingChange() {
+ Collection<NativeRoutingEventHandlerDelegate> values;
+ synchronized (mRoutingChangeListeners) {
+ values = mRoutingChangeListeners.values();
+ }
+ AudioManager.resetAudioPortGeneration();
+ for(NativeRoutingEventHandlerDelegate delegate : values) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
+ }
+ }
+ }
+
/**
* Sets the period at which the listener is called, if set with
* {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
@@ -824,6 +1379,44 @@ public class AudioRecord
return native_set_pos_update_period(periodInFrames);
}
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
+ private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the input to this AudioRecord.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio source.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio input device.
+ */
+ public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
+ // Do some validation....
+ if (deviceInfo != null && !deviceInfo.isSource()) {
+ return false;
+ }
+
+ int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
+ boolean status = native_setInputDevice(preferredDeviceId);
+ if (status == true) {
+ synchronized (this) {
+ mPreferredDevice = deviceInfo;
+ }
+ }
+ return status;
+ }
+
+ /**
+ * Returns the selected input specified by {@link #setPreferredDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for recording.
+ */
+ public AudioDeviceInfo getPreferredDevice() {
+ synchronized (this) {
+ return mPreferredDevice;
+ }
+ }
//---------------------------------------------------------
// Interface definitions
@@ -905,6 +1498,11 @@ public class AudioRecord
return;
}
+ if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) {
+ recorder.broadcastRoutingChange();
+ return;
+ }
+
if (recorder.mEventHandler != null) {
Message m =
recorder.mEventHandler.obtainMessage(what, arg1, arg2, obj);
@@ -920,8 +1518,8 @@ public class AudioRecord
private native final int native_setup(Object audiorecord_this,
Object /*AudioAttributes*/ attributes,
- int sampleRate, int channelMask, int audioFormat,
- int buffSizeInBytes, int[] sessionId);
+ int sampleRate, int channelMask, int channelIndexMask, int audioFormat,
+ int buffSizeInBytes, int[] sessionId, String opPackageName);
// TODO remove: implementation calls directly into implementation of native_release()
private native final void native_finalize();
@@ -933,12 +1531,18 @@ public class AudioRecord
private native final void native_stop();
private native final int native_read_in_byte_array(byte[] audioData,
- int offsetInBytes, int sizeInBytes);
+ int offsetInBytes, int sizeInBytes, boolean isBlocking);
private native final int native_read_in_short_array(short[] audioData,
- int offsetInShorts, int sizeInShorts);
+ int offsetInShorts, int sizeInShorts, boolean isBlocking);
- private native final int native_read_in_direct_buffer(Object jBuffer, int sizeInBytes);
+ private native final int native_read_in_float_array(float[] audioData,
+ int offsetInFloats, int sizeInFloats, boolean isBlocking);
+
+ private native final int native_read_in_direct_buffer(Object jBuffer,
+ int sizeInBytes, boolean isBlocking);
+
+ private native final int native_get_buffer_size_in_frames();
private native final int native_set_marker_pos(int marker);
private native final int native_get_marker_pos();
@@ -949,6 +1553,10 @@ public class AudioRecord
static private native final int native_get_min_buff_size(
int sampleRateInHz, int channelCount, int audioFormat);
+ private native final boolean native_setInputDevice(int deviceId);
+ private native final int native_getRoutedDeviceId();
+ private native final void native_enableDeviceCallback();
+ private native final void native_disableDeviceCallback();
//---------------------------------------------------------
// Utility methods
@@ -961,5 +1569,4 @@ public class AudioRecord
private static void loge(String msg) {
Log.e(TAG, msg);
}
-
}
diff --git a/media/java/android/media/AudioRoutesInfo.java b/media/java/android/media/AudioRoutesInfo.java
index df9fc06..6ae0d46 100644
--- a/media/java/android/media/AudioRoutesInfo.java
+++ b/media/java/android/media/AudioRoutesInfo.java
@@ -25,26 +25,27 @@ import android.text.TextUtils;
* @hide
*/
public class AudioRoutesInfo implements Parcelable {
- static final int MAIN_SPEAKER = 0;
- static final int MAIN_HEADSET = 1<<0;
- static final int MAIN_HEADPHONES = 1<<1;
- static final int MAIN_DOCK_SPEAKERS = 1<<2;
- static final int MAIN_HDMI = 1<<3;
+ public static final int MAIN_SPEAKER = 0;
+ public static final int MAIN_HEADSET = 1<<0;
+ public static final int MAIN_HEADPHONES = 1<<1;
+ public static final int MAIN_DOCK_SPEAKERS = 1<<2;
+ public static final int MAIN_HDMI = 1<<3;
+ public static final int MAIN_USB = 1<<4;
- CharSequence mBluetoothName;
- int mMainType = MAIN_SPEAKER;
+ public CharSequence bluetoothName;
+ public int mainType = MAIN_SPEAKER;
public AudioRoutesInfo() {
}
public AudioRoutesInfo(AudioRoutesInfo o) {
- mBluetoothName = o.mBluetoothName;
- mMainType = o.mMainType;
+ bluetoothName = o.bluetoothName;
+ mainType = o.mainType;
}
AudioRoutesInfo(Parcel src) {
- mBluetoothName = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(src);
- mMainType = src.readInt();
+ bluetoothName = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(src);
+ mainType = src.readInt();
}
@Override
@@ -54,8 +55,8 @@ public class AudioRoutesInfo implements Parcelable {
@Override
public void writeToParcel(Parcel dest, int flags) {
- TextUtils.writeToParcel(mBluetoothName, dest, flags);
- dest.writeInt(mMainType);
+ TextUtils.writeToParcel(bluetoothName, dest, flags);
+ dest.writeInt(mainType);
}
public static final Parcelable.Creator<AudioRoutesInfo> CREATOR
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
deleted file mode 100644
index 9b76f89..0000000
--- a/media/java/android/media/AudioService.java
+++ /dev/null
@@ -1,6006 +0,0 @@
-/*
- * Copyright (C) 2006 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import static android.Manifest.permission.REMOTE_AUDIO_PLAYBACK;
-import static android.media.AudioManager.RINGER_MODE_NORMAL;
-import static android.media.AudioManager.RINGER_MODE_SILENT;
-import static android.media.AudioManager.RINGER_MODE_VIBRATE;
-
-import android.app.ActivityManager;
-import android.app.ActivityManagerNative;
-import android.app.AppOpsManager;
-import android.app.KeyguardManager;
-import android.bluetooth.BluetoothA2dp;
-import android.bluetooth.BluetoothAdapter;
-import android.bluetooth.BluetoothClass;
-import android.bluetooth.BluetoothDevice;
-import android.bluetooth.BluetoothHeadset;
-import android.bluetooth.BluetoothProfile;
-import android.content.BroadcastReceiver;
-import android.content.ComponentName;
-import android.content.ContentResolver;
-import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.content.pm.PackageManager;
-import android.content.res.Configuration;
-import android.content.res.Resources;
-import android.content.res.XmlResourceParser;
-import android.database.ContentObserver;
-import android.hardware.hdmi.HdmiControlManager;
-import android.hardware.hdmi.HdmiPlaybackClient;
-import android.hardware.hdmi.HdmiTvClient;
-import android.hardware.usb.UsbManager;
-import android.media.MediaPlayer.OnCompletionListener;
-import android.media.MediaPlayer.OnErrorListener;
-import android.media.audiopolicy.AudioMix;
-import android.media.audiopolicy.AudioPolicy;
-import android.media.audiopolicy.AudioPolicyConfig;
-import android.media.audiopolicy.IAudioPolicyCallback;
-import android.os.Binder;
-import android.os.Build;
-import android.os.Environment;
-import android.os.Handler;
-import android.os.IBinder;
-import android.os.Looper;
-import android.os.Message;
-import android.os.PowerManager;
-import android.os.RemoteCallbackList;
-import android.os.RemoteException;
-import android.os.SystemClock;
-import android.os.SystemProperties;
-import android.os.UserHandle;
-import android.os.Vibrator;
-import android.provider.Settings;
-import android.provider.Settings.System;
-import android.telecom.TelecomManager;
-import android.text.TextUtils;
-import android.util.Log;
-import android.util.MathUtils;
-import android.util.Slog;
-import android.view.KeyEvent;
-import android.view.OrientationEventListener;
-import android.view.Surface;
-import android.view.WindowManager;
-import android.view.accessibility.AccessibilityManager;
-
-import com.android.internal.util.XmlUtils;
-import com.android.server.LocalServices;
-
-import org.xmlpull.v1.XmlPullParserException;
-
-import java.io.FileDescriptor;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-
-/**
- * The implementation of the volume manager service.
- * <p>
- * This implementation focuses on delivering a responsive UI. Most methods are
- * asynchronous to external calls. For example, the task of setting a volume
- * will update our internal state, but in a separate thread will set the system
- * volume and later persist to the database. Similarly, setting the ringer mode
- * will update the state and broadcast a change and in a separate thread later
- * persist the ringer mode.
- *
- * @hide
- */
-public class AudioService extends IAudioService.Stub {
-
- private static final String TAG = "AudioService";
-
- /** Debug audio mode */
- protected static final boolean DEBUG_MODE = Log.isLoggable(TAG + ".MOD", Log.DEBUG);
-
- /** Debug audio policy feature */
- protected static final boolean DEBUG_AP = Log.isLoggable(TAG + ".AP", Log.DEBUG);
-
- /** Debug volumes */
- protected static final boolean DEBUG_VOL = Log.isLoggable(TAG + ".VOL", Log.DEBUG);
-
- /** debug calls to media session apis */
- private static final boolean DEBUG_SESSIONS = Log.isLoggable(TAG + ".SESSIONS", Log.DEBUG);
-
- /** Allow volume changes to set ringer mode to silent? */
- private static final boolean VOLUME_SETS_RINGER_MODE_SILENT = false;
-
- /** In silent mode, are volume adjustments (raises) prevented? */
- private static final boolean PREVENT_VOLUME_ADJUSTMENT_IF_SILENT = true;
-
- /** How long to delay before persisting a change in volume/ringer mode. */
- private static final int PERSIST_DELAY = 500;
-
- /**
- * The delay before playing a sound. This small period exists so the user
- * can press another key (non-volume keys, too) to have it NOT be audible.
- * <p>
- * PhoneWindow will implement this part.
- */
- public static final int PLAY_SOUND_DELAY = 300;
-
- /**
- * Only used in the result from {@link #checkForRingerModeChange(int, int, int)}
- */
- private static final int FLAG_ADJUST_VOLUME = 1;
-
- private final Context mContext;
- private final ContentResolver mContentResolver;
- private final AppOpsManager mAppOps;
-
- // the platform has no specific capabilities
- private static final int PLATFORM_DEFAULT = 0;
- // the platform is voice call capable (a phone)
- private static final int PLATFORM_VOICE = 1;
- // the platform is a television or a set-top box
- private static final int PLATFORM_TELEVISION = 2;
- // the platform type affects volume and silent mode behavior
- private final int mPlatformType;
-
- private boolean isPlatformVoice() {
- return mPlatformType == PLATFORM_VOICE;
- }
-
- private boolean isPlatformTelevision() {
- return mPlatformType == PLATFORM_TELEVISION;
- }
-
- /** The controller for the volume UI. */
- private final VolumeController mVolumeController = new VolumeController();
-
- // sendMsg() flags
- /** If the msg is already queued, replace it with this one. */
- private static final int SENDMSG_REPLACE = 0;
- /** If the msg is already queued, ignore this one and leave the old. */
- private static final int SENDMSG_NOOP = 1;
- /** If the msg is already queued, queue this one and leave the old. */
- private static final int SENDMSG_QUEUE = 2;
-
- // AudioHandler messages
- private static final int MSG_SET_DEVICE_VOLUME = 0;
- private static final int MSG_PERSIST_VOLUME = 1;
- private static final int MSG_PERSIST_MASTER_VOLUME = 2;
- private static final int MSG_PERSIST_RINGER_MODE = 3;
- private static final int MSG_MEDIA_SERVER_DIED = 4;
- private static final int MSG_PLAY_SOUND_EFFECT = 5;
- private static final int MSG_BTA2DP_DOCK_TIMEOUT = 6;
- private static final int MSG_LOAD_SOUND_EFFECTS = 7;
- private static final int MSG_SET_FORCE_USE = 8;
- private static final int MSG_BT_HEADSET_CNCT_FAILED = 9;
- private static final int MSG_SET_ALL_VOLUMES = 10;
- private static final int MSG_PERSIST_MASTER_VOLUME_MUTE = 11;
- private static final int MSG_REPORT_NEW_ROUTES = 12;
- private static final int MSG_SET_FORCE_BT_A2DP_USE = 13;
- private static final int MSG_CHECK_MUSIC_ACTIVE = 14;
- private static final int MSG_BROADCAST_AUDIO_BECOMING_NOISY = 15;
- private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME = 16;
- private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED = 17;
- private static final int MSG_PERSIST_SAFE_VOLUME_STATE = 18;
- private static final int MSG_BROADCAST_BT_CONNECTION_STATE = 19;
- private static final int MSG_UNLOAD_SOUND_EFFECTS = 20;
- private static final int MSG_SYSTEM_READY = 21;
- private static final int MSG_PERSIST_MUSIC_ACTIVE_MS = 22;
- private static final int MSG_PERSIST_MICROPHONE_MUTE = 23;
- // start of messages handled under wakelock
- // these messages can only be queued, i.e. sent with queueMsgUnderWakeLock(),
- // and not with sendMsg(..., ..., SENDMSG_QUEUE, ...)
- private static final int MSG_SET_WIRED_DEVICE_CONNECTION_STATE = 100;
- private static final int MSG_SET_A2DP_SRC_CONNECTION_STATE = 101;
- private static final int MSG_SET_A2DP_SINK_CONNECTION_STATE = 102;
- // end of messages handled under wakelock
-
- private static final int BTA2DP_DOCK_TIMEOUT_MILLIS = 8000;
- // Timeout for connection to bluetooth headset service
- private static final int BT_HEADSET_CNCT_TIMEOUT_MS = 3000;
-
- /** @see AudioSystemThread */
- private AudioSystemThread mAudioSystemThread;
- /** @see AudioHandler */
- private AudioHandler mAudioHandler;
- /** @see VolumeStreamState */
- private VolumeStreamState[] mStreamStates;
- private SettingsObserver mSettingsObserver;
-
- private int mMode = AudioSystem.MODE_NORMAL;
- // protects mRingerMode
- private final Object mSettingsLock = new Object();
-
- private SoundPool mSoundPool;
- private final Object mSoundEffectsLock = new Object();
- private static final int NUM_SOUNDPOOL_CHANNELS = 4;
-
- // Internally master volume is a float in the 0.0 - 1.0 range,
- // but to support integer based AudioManager API we translate it to 0 - 100
- private static final int MAX_MASTER_VOLUME = 100;
-
- // Maximum volume adjust steps allowed in a single batch call.
- private static final int MAX_BATCH_VOLUME_ADJUST_STEPS = 4;
-
- /* Sound effect file names */
- private static final String SOUND_EFFECTS_PATH = "/media/audio/ui/";
- private static final List<String> SOUND_EFFECT_FILES = new ArrayList<String>();
-
- /* Sound effect file name mapping sound effect id (AudioManager.FX_xxx) to
- * file index in SOUND_EFFECT_FILES[] (first column) and indicating if effect
- * uses soundpool (second column) */
- private final int[][] SOUND_EFFECT_FILES_MAP = new int[AudioManager.NUM_SOUND_EFFECTS][2];
-
- /** @hide Maximum volume index values for audio streams */
- private static int[] MAX_STREAM_VOLUME = new int[] {
- 5, // STREAM_VOICE_CALL
- 7, // STREAM_SYSTEM
- 7, // STREAM_RING
- 15, // STREAM_MUSIC
- 7, // STREAM_ALARM
- 7, // STREAM_NOTIFICATION
- 15, // STREAM_BLUETOOTH_SCO
- 7, // STREAM_SYSTEM_ENFORCED
- 15, // STREAM_DTMF
- 15 // STREAM_TTS
- };
-
- private static int[] DEFAULT_STREAM_VOLUME = new int[] {
- 4, // STREAM_VOICE_CALL
- 7, // STREAM_SYSTEM
- 5, // STREAM_RING
- 11, // STREAM_MUSIC
- 6, // STREAM_ALARM
- 5, // STREAM_NOTIFICATION
- 7, // STREAM_BLUETOOTH_SCO
- 7, // STREAM_SYSTEM_ENFORCED
- 11, // STREAM_DTMF
- 11 // STREAM_TTS
- };
-
- /* mStreamVolumeAlias[] indicates for each stream if it uses the volume settings
- * of another stream: This avoids multiplying the volume settings for hidden
- * stream types that follow other stream behavior for volume settings
- * NOTE: do not create loops in aliases!
- * Some streams alias to different streams according to device category (phone or tablet) or
- * use case (in call vs off call...). See updateStreamVolumeAlias() for more details.
- * mStreamVolumeAlias contains STREAM_VOLUME_ALIAS_VOICE aliases for a voice capable device
- * (phone), STREAM_VOLUME_ALIAS_TELEVISION for a television or set-top box and
- * STREAM_VOLUME_ALIAS_DEFAULT for other devices (e.g. tablets).*/
- private final int[] STREAM_VOLUME_ALIAS_VOICE = new int[] {
- AudioSystem.STREAM_VOICE_CALL, // STREAM_VOICE_CALL
- AudioSystem.STREAM_RING, // STREAM_SYSTEM
- AudioSystem.STREAM_RING, // STREAM_RING
- AudioSystem.STREAM_MUSIC, // STREAM_MUSIC
- AudioSystem.STREAM_ALARM, // STREAM_ALARM
- AudioSystem.STREAM_RING, // STREAM_NOTIFICATION
- AudioSystem.STREAM_BLUETOOTH_SCO, // STREAM_BLUETOOTH_SCO
- AudioSystem.STREAM_RING, // STREAM_SYSTEM_ENFORCED
- AudioSystem.STREAM_RING, // STREAM_DTMF
- AudioSystem.STREAM_MUSIC // STREAM_TTS
- };
- private final int[] STREAM_VOLUME_ALIAS_TELEVISION = new int[] {
- AudioSystem.STREAM_MUSIC, // STREAM_VOICE_CALL
- AudioSystem.STREAM_MUSIC, // STREAM_SYSTEM
- AudioSystem.STREAM_MUSIC, // STREAM_RING
- AudioSystem.STREAM_MUSIC, // STREAM_MUSIC
- AudioSystem.STREAM_MUSIC, // STREAM_ALARM
- AudioSystem.STREAM_MUSIC, // STREAM_NOTIFICATION
- AudioSystem.STREAM_MUSIC, // STREAM_BLUETOOTH_SCO
- AudioSystem.STREAM_MUSIC, // STREAM_SYSTEM_ENFORCED
- AudioSystem.STREAM_MUSIC, // STREAM_DTMF
- AudioSystem.STREAM_MUSIC // STREAM_TTS
- };
- private final int[] STREAM_VOLUME_ALIAS_DEFAULT = new int[] {
- AudioSystem.STREAM_VOICE_CALL, // STREAM_VOICE_CALL
- AudioSystem.STREAM_RING, // STREAM_SYSTEM
- AudioSystem.STREAM_RING, // STREAM_RING
- AudioSystem.STREAM_MUSIC, // STREAM_MUSIC
- AudioSystem.STREAM_ALARM, // STREAM_ALARM
- AudioSystem.STREAM_RING, // STREAM_NOTIFICATION
- AudioSystem.STREAM_BLUETOOTH_SCO, // STREAM_BLUETOOTH_SCO
- AudioSystem.STREAM_RING, // STREAM_SYSTEM_ENFORCED
- AudioSystem.STREAM_RING, // STREAM_DTMF
- AudioSystem.STREAM_MUSIC // STREAM_TTS
- };
- private int[] mStreamVolumeAlias;
-
- /**
- * Map AudioSystem.STREAM_* constants to app ops. This should be used
- * after mapping through mStreamVolumeAlias.
- */
- private static final int[] STEAM_VOLUME_OPS = new int[] {
- AppOpsManager.OP_AUDIO_VOICE_VOLUME, // STREAM_VOICE_CALL
- AppOpsManager.OP_AUDIO_MEDIA_VOLUME, // STREAM_SYSTEM
- AppOpsManager.OP_AUDIO_RING_VOLUME, // STREAM_RING
- AppOpsManager.OP_AUDIO_MEDIA_VOLUME, // STREAM_MUSIC
- AppOpsManager.OP_AUDIO_ALARM_VOLUME, // STREAM_ALARM
- AppOpsManager.OP_AUDIO_NOTIFICATION_VOLUME, // STREAM_NOTIFICATION
- AppOpsManager.OP_AUDIO_BLUETOOTH_VOLUME, // STREAM_BLUETOOTH_SCO
- AppOpsManager.OP_AUDIO_MEDIA_VOLUME, // STREAM_SYSTEM_ENFORCED
- AppOpsManager.OP_AUDIO_MEDIA_VOLUME, // STREAM_DTMF
- AppOpsManager.OP_AUDIO_MEDIA_VOLUME, // STREAM_TTS
- };
-
- private final boolean mUseFixedVolume;
-
- // stream names used by dumpStreamStates()
- private static final String[] STREAM_NAMES = new String[] {
- "STREAM_VOICE_CALL",
- "STREAM_SYSTEM",
- "STREAM_RING",
- "STREAM_MUSIC",
- "STREAM_ALARM",
- "STREAM_NOTIFICATION",
- "STREAM_BLUETOOTH_SCO",
- "STREAM_SYSTEM_ENFORCED",
- "STREAM_DTMF",
- "STREAM_TTS"
- };
-
- private final AudioSystem.ErrorCallback mAudioSystemCallback = new AudioSystem.ErrorCallback() {
- public void onError(int error) {
- switch (error) {
- case AudioSystem.AUDIO_STATUS_SERVER_DIED:
- sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED,
- SENDMSG_NOOP, 0, 0, null, 0);
- break;
- default:
- break;
- }
- }
- };
-
- /**
- * Current ringer mode from one of {@link AudioManager#RINGER_MODE_NORMAL},
- * {@link AudioManager#RINGER_MODE_SILENT}, or
- * {@link AudioManager#RINGER_MODE_VIBRATE}.
- */
- // protected by mSettingsLock
- private int mRingerMode; // internal ringer mode, affects muting of underlying streams
- private int mRingerModeExternal = -1; // reported ringer mode to outside clients (AudioManager)
-
- /** @see System#MODE_RINGER_STREAMS_AFFECTED */
- private int mRingerModeAffectedStreams = 0;
-
- // Streams currently muted by ringer mode
- private int mRingerModeMutedStreams;
-
- /** @see System#MUTE_STREAMS_AFFECTED */
- private int mMuteAffectedStreams;
-
- /**
- * NOTE: setVibrateSetting(), getVibrateSetting(), shouldVibrate() are deprecated.
- * mVibrateSetting is just maintained during deprecation period but vibration policy is
- * now only controlled by mHasVibrator and mRingerMode
- */
- private int mVibrateSetting;
-
- // Is there a vibrator
- private final boolean mHasVibrator;
-
- // Broadcast receiver for device connections intent broadcasts
- private final BroadcastReceiver mReceiver = new AudioServiceBroadcastReceiver();
-
- // Devices currently connected
- private final HashMap <Integer, String> mConnectedDevices = new HashMap <Integer, String>();
-
- // Forced device usage for communications
- private int mForcedUseForComm;
-
- // True if we have master volume support
- private final boolean mUseMasterVolume;
-
- private final int[] mMasterVolumeRamp;
-
- // List of binder death handlers for setMode() client processes.
- // The last process to have called setMode() is at the top of the list.
- private final ArrayList <SetModeDeathHandler> mSetModeDeathHandlers = new ArrayList <SetModeDeathHandler>();
-
- // List of clients having issued a SCO start request
- private final ArrayList <ScoClient> mScoClients = new ArrayList <ScoClient>();
-
- // BluetoothHeadset API to control SCO connection
- private BluetoothHeadset mBluetoothHeadset;
-
- // Bluetooth headset device
- private BluetoothDevice mBluetoothHeadsetDevice;
-
- // Indicate if SCO audio connection is currently active and if the initiator is
- // audio service (internal) or bluetooth headset (external)
- private int mScoAudioState;
- // SCO audio state is not active
- private static final int SCO_STATE_INACTIVE = 0;
- // SCO audio activation request waiting for headset service to connect
- private static final int SCO_STATE_ACTIVATE_REQ = 1;
- // SCO audio state is active or starting due to a request from AudioManager API
- private static final int SCO_STATE_ACTIVE_INTERNAL = 3;
- // SCO audio deactivation request waiting for headset service to connect
- private static final int SCO_STATE_DEACTIVATE_REQ = 5;
-
- // SCO audio state is active due to an action in BT handsfree (either voice recognition or
- // in call audio)
- private static final int SCO_STATE_ACTIVE_EXTERNAL = 2;
- // Deactivation request for all SCO connections (initiated by audio mode change)
- // waiting for headset service to connect
- private static final int SCO_STATE_DEACTIVATE_EXT_REQ = 4;
-
- // Indicates the mode used for SCO audio connection. The mode is virtual call if the request
- // originated from an app targeting an API version before JB MR2 and raw audio after that.
- private int mScoAudioMode;
- // SCO audio mode is undefined
- private static final int SCO_MODE_UNDEFINED = -1;
- // SCO audio mode is virtual voice call (BluetoothHeadset.startScoUsingVirtualVoiceCall())
- private static final int SCO_MODE_VIRTUAL_CALL = 0;
- // SCO audio mode is raw audio (BluetoothHeadset.connectAudio())
- private static final int SCO_MODE_RAW = 1;
- // SCO audio mode is Voice Recognition (BluetoothHeadset.startVoiceRecognition())
- private static final int SCO_MODE_VR = 2;
-
- private static final int SCO_MODE_MAX = 2;
-
- // Current connection state indicated by bluetooth headset
- private int mScoConnectionState;
-
- // true if boot sequence has been completed
- private boolean mSystemReady;
- // listener for SoundPool sample load completion indication
- private SoundPoolCallback mSoundPoolCallBack;
- // thread for SoundPool listener
- private SoundPoolListenerThread mSoundPoolListenerThread;
- // message looper for SoundPool listener
- private Looper mSoundPoolLooper = null;
- // volume applied to sound played with playSoundEffect()
- private static int sSoundEffectVolumeDb;
- // previous volume adjustment direction received by checkForRingerModeChange()
- private int mPrevVolDirection = AudioManager.ADJUST_SAME;
- // Keyguard manager proxy
- private KeyguardManager mKeyguardManager;
- // mVolumeControlStream is set by VolumePanel to temporarily force the stream type which volume
- // is controlled by Vol keys.
- private int mVolumeControlStream = -1;
- private final Object mForceControlStreamLock = new Object();
- // VolumePanel is currently the only client of forceVolumeControlStream() and runs in system
- // server process so in theory it is not necessary to monitor the client death.
- // However it is good to be ready for future evolutions.
- private ForceControlStreamClient mForceControlStreamClient = null;
- // Used to play ringtones outside system_server
- private volatile IRingtonePlayer mRingtonePlayer;
-
- private int mDeviceOrientation = Configuration.ORIENTATION_UNDEFINED;
- private int mDeviceRotation = Surface.ROTATION_0;
-
- // Request to override default use of A2DP for media.
- private boolean mBluetoothA2dpEnabled;
- private final Object mBluetoothA2dpEnabledLock = new Object();
-
- // Monitoring of audio routes. Protected by mCurAudioRoutes.
- final AudioRoutesInfo mCurAudioRoutes = new AudioRoutesInfo();
- final RemoteCallbackList<IAudioRoutesObserver> mRoutesObservers
- = new RemoteCallbackList<IAudioRoutesObserver>();
-
- // Devices for which the volume is fixed and VolumePanel slider should be disabled
- int mFixedVolumeDevices = AudioSystem.DEVICE_OUT_HDMI |
- AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET |
- AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET |
- AudioSystem.DEVICE_OUT_HDMI_ARC |
- AudioSystem.DEVICE_OUT_SPDIF |
- AudioSystem.DEVICE_OUT_AUX_LINE;
- int mFullVolumeDevices = 0;
-
- // TODO merge orientation and rotation
- private final boolean mMonitorOrientation;
- private final boolean mMonitorRotation;
-
- private boolean mDockAudioMediaEnabled = true;
-
- private int mDockState = Intent.EXTRA_DOCK_STATE_UNDOCKED;
-
- // Used when safe volume warning message display is requested by setStreamVolume(). In this
- // case, the new requested volume, stream type and device are stored in mPendingVolumeCommand
- // and used later when/if disableSafeMediaVolume() is called.
- private StreamVolumeCommand mPendingVolumeCommand;
-
- private PowerManager.WakeLock mAudioEventWakeLock;
-
- private final MediaFocusControl mMediaFocusControl;
-
- // Reference to BluetoothA2dp to query for AbsoluteVolume.
- private BluetoothA2dp mA2dp;
- // lock always taken synchronized on mConnectedDevices
- private final Object mA2dpAvrcpLock = new Object();
- // If absolute volume is supported in AVRCP device
- private boolean mAvrcpAbsVolSupported = false;
-
- private AudioOrientationEventListener mOrientationListener;
-
- private static Long mLastDeviceConnectMsgTime = new Long(0);
-
- private AudioManagerInternal.RingerModeDelegate mRingerModeDelegate;
-
- ///////////////////////////////////////////////////////////////////////////
- // Construction
- ///////////////////////////////////////////////////////////////////////////
-
- /** @hide */
- public AudioService(Context context) {
- mContext = context;
- mContentResolver = context.getContentResolver();
- mAppOps = (AppOpsManager)context.getSystemService(Context.APP_OPS_SERVICE);
-
- if (mContext.getResources().getBoolean(
- com.android.internal.R.bool.config_voice_capable)) {
- mPlatformType = PLATFORM_VOICE;
- } else if (context.getPackageManager().hasSystemFeature(
- PackageManager.FEATURE_LEANBACK)) {
- mPlatformType = PLATFORM_TELEVISION;
- } else {
- mPlatformType = PLATFORM_DEFAULT;
- }
-
- PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE);
- mAudioEventWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "handleAudioEvent");
-
- Vibrator vibrator = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE);
- mHasVibrator = vibrator == null ? false : vibrator.hasVibrator();
-
- // Intialized volume
- int maxVolume = SystemProperties.getInt("ro.config.vc_call_vol_steps",
- MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]);
- if (maxVolume != MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]) {
- MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = maxVolume;
- DEFAULT_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = (maxVolume * 3) / 4;
- }
- maxVolume = SystemProperties.getInt("ro.config.media_vol_steps",
- MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]);
- if (maxVolume != MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]) {
- MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC] = maxVolume;
- DEFAULT_STREAM_VOLUME[AudioSystem.STREAM_MUSIC] = (maxVolume * 3) / 4;
- }
-
- sSoundEffectVolumeDb = context.getResources().getInteger(
- com.android.internal.R.integer.config_soundEffectVolumeDb);
-
- mForcedUseForComm = AudioSystem.FORCE_NONE;
-
- createAudioSystemThread();
-
- mMediaFocusControl = new MediaFocusControl(mAudioHandler.getLooper(),
- mContext, mVolumeController, this);
-
- AudioSystem.setErrorCallback(mAudioSystemCallback);
-
- boolean cameraSoundForced = mContext.getResources().getBoolean(
- com.android.internal.R.bool.config_camera_sound_forced);
- mCameraSoundForced = new Boolean(cameraSoundForced);
- sendMsg(mAudioHandler,
- MSG_SET_FORCE_USE,
- SENDMSG_QUEUE,
- AudioSystem.FOR_SYSTEM,
- cameraSoundForced ?
- AudioSystem.FORCE_SYSTEM_ENFORCED : AudioSystem.FORCE_NONE,
- null,
- 0);
-
- mSafeMediaVolumeState = new Integer(Settings.Global.getInt(mContentResolver,
- Settings.Global.AUDIO_SAFE_VOLUME_STATE,
- SAFE_MEDIA_VOLUME_NOT_CONFIGURED));
- // The default safe volume index read here will be replaced by the actual value when
- // the mcc is read by onConfigureSafeVolume()
- mSafeMediaVolumeIndex = mContext.getResources().getInteger(
- com.android.internal.R.integer.config_safe_media_volume_index) * 10;
-
- mUseFixedVolume = mContext.getResources().getBoolean(
- com.android.internal.R.bool.config_useFixedVolume);
- mUseMasterVolume = context.getResources().getBoolean(
- com.android.internal.R.bool.config_useMasterVolume);
- mMasterVolumeRamp = context.getResources().getIntArray(
- com.android.internal.R.array.config_masterVolumeRamp);
-
- // must be called before readPersistedSettings() which needs a valid mStreamVolumeAlias[]
- // array initialized by updateStreamVolumeAlias()
- updateStreamVolumeAlias(false /*updateVolumes*/);
- readPersistedSettings();
- mSettingsObserver = new SettingsObserver();
- createStreamStates();
-
- readAndSetLowRamDevice();
-
- // Call setRingerModeInt() to apply correct mute
- // state on streams affected by ringer mode.
- mRingerModeMutedStreams = 0;
- setRingerModeInt(getRingerModeInternal(), false);
-
- // Register for device connection intent broadcasts.
- IntentFilter intentFilter =
- new IntentFilter(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
- intentFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
- intentFilter.addAction(Intent.ACTION_DOCK_EVENT);
- intentFilter.addAction(AudioManager.ACTION_USB_AUDIO_ACCESSORY_PLUG);
- intentFilter.addAction(AudioManager.ACTION_USB_AUDIO_DEVICE_PLUG);
- intentFilter.addAction(Intent.ACTION_SCREEN_ON);
- intentFilter.addAction(Intent.ACTION_SCREEN_OFF);
- intentFilter.addAction(Intent.ACTION_USER_SWITCHED);
- intentFilter.addAction(UsbManager.ACTION_USB_DEVICE_ATTACHED);
-
- intentFilter.addAction(Intent.ACTION_CONFIGURATION_CHANGED);
- // TODO merge orientation and rotation
- mMonitorOrientation = SystemProperties.getBoolean("ro.audio.monitorOrientation", false);
- if (mMonitorOrientation) {
- Log.v(TAG, "monitoring device orientation");
- // initialize orientation in AudioSystem
- setOrientationForAudioSystem();
- }
- mMonitorRotation = SystemProperties.getBoolean("ro.audio.monitorRotation", false);
- if (mMonitorRotation) {
- mDeviceRotation = ((WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE))
- .getDefaultDisplay().getRotation();
- Log.v(TAG, "monitoring device rotation, initial=" + mDeviceRotation);
-
- mOrientationListener = new AudioOrientationEventListener(mContext);
- mOrientationListener.enable();
-
- // initialize rotation in AudioSystem
- setRotationForAudioSystem();
- }
-
- context.registerReceiver(mReceiver, intentFilter);
-
- restoreMasterVolume();
-
- LocalServices.addService(AudioManagerInternal.class, new AudioServiceInternal());
- }
-
- public void systemReady() {
- sendMsg(mAudioHandler, MSG_SYSTEM_READY, SENDMSG_QUEUE,
- 0, 0, null, 0);
- }
-
- public void onSystemReady() {
- mSystemReady = true;
- sendMsg(mAudioHandler, MSG_LOAD_SOUND_EFFECTS, SENDMSG_QUEUE,
- 0, 0, null, 0);
-
- mKeyguardManager =
- (KeyguardManager) mContext.getSystemService(Context.KEYGUARD_SERVICE);
- mScoConnectionState = AudioManager.SCO_AUDIO_STATE_ERROR;
- resetBluetoothSco();
- getBluetoothHeadset();
- //FIXME: this is to maintain compatibility with deprecated intent
- // AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED. Remove when appropriate.
- Intent newIntent = new Intent(AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED);
- newIntent.putExtra(AudioManager.EXTRA_SCO_AUDIO_STATE,
- AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
- sendStickyBroadcastToAll(newIntent);
-
- BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
- if (adapter != null) {
- adapter.getProfileProxy(mContext, mBluetoothProfileServiceListener,
- BluetoothProfile.A2DP);
- }
-
- mHdmiManager =
- (HdmiControlManager) mContext.getSystemService(Context.HDMI_CONTROL_SERVICE);
- if (mHdmiManager != null) {
- synchronized (mHdmiManager) {
- mHdmiTvClient = mHdmiManager.getTvClient();
- if (mHdmiTvClient != null) {
- mFixedVolumeDevices &= ~AudioSystem.DEVICE_ALL_HDMI_SYSTEM_AUDIO_AND_SPEAKER;
- }
- mHdmiPlaybackClient = mHdmiManager.getPlaybackClient();
- mHdmiCecSink = false;
- }
- }
-
- sendMsg(mAudioHandler,
- MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED,
- SENDMSG_REPLACE,
- 0,
- 0,
- null,
- SAFE_VOLUME_CONFIGURE_TIMEOUT_MS);
-
- StreamOverride.init(mContext);
- }
-
- private void createAudioSystemThread() {
- mAudioSystemThread = new AudioSystemThread();
- mAudioSystemThread.start();
- waitForAudioHandlerCreation();
- }
-
    /** Waits for the volume handler to be created by the other thread. */
    private void waitForAudioHandlerCreation() {
        synchronized(this) {
            while (mAudioHandler == null) {
                try {
                    // Wait for mAudioHandler to be set by the other thread
                    wait();
                } catch (InterruptedException e) {
                    // Deliberately keep waiting: the service cannot proceed without
                    // the handler, so the interrupt is logged and the loop retries.
                    Log.e(TAG, "Interrupted while waiting on volume handler.");
                }
            }
        }
    }
-
    /**
     * Copies each aliased stream's indexes from its alias source, then re-applies
     * volumes for all non-muted streams. Holds the VolumeStreamState class lock so
     * index updates stay atomic with respect to other volume operations.
     */
    private void checkAllAliasStreamVolumes() {
        synchronized (VolumeStreamState.class) {
            int numStreamTypes = AudioSystem.getNumStreamTypes();
            for (int streamType = 0; streamType < numStreamTypes; streamType++) {
                if (streamType != mStreamVolumeAlias[streamType]) {
                    mStreamStates[streamType].
                            setAllIndexes(mStreamStates[mStreamVolumeAlias[streamType]]);
                }
                // apply stream volume
                if (!mStreamStates[streamType].isMuted_syncVSS()) {
                    mStreamStates[streamType].applyAllVolumes();
                }
            }
        }
    }
-
- private void checkAllFixedVolumeDevices()
- {
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int streamType = 0; streamType < numStreamTypes; streamType++) {
- mStreamStates[streamType].checkFixedVolumeDevices();
- }
- }
-
    /** Re-evaluates fixed-volume device handling for a single stream type. */
    private void checkAllFixedVolumeDevices(int streamType) {
        mStreamStates[streamType].checkFixedVolumeDevices();
    }
-
    /**
     * Allocates one VolumeStreamState per stream type — each persisting under the
     * settings key of its alias stream — then validates fixed-volume devices and
     * synchronizes alias indexes.
     */
    private void createStreamStates() {
        int numStreamTypes = AudioSystem.getNumStreamTypes();
        VolumeStreamState[] streams = mStreamStates = new VolumeStreamState[numStreamTypes];

        for (int i = 0; i < numStreamTypes; i++) {
            streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[mStreamVolumeAlias[i]], i);
        }

        checkAllFixedVolumeDevices();
        checkAllAliasStreamVolumes();
    }
-
- private void dumpStreamStates(PrintWriter pw) {
- pw.println("\nStream volumes (device: index)");
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int i = 0; i < numStreamTypes; i++) {
- pw.println("- "+STREAM_NAMES[i]+":");
- mStreamStates[i].dump(pw);
- pw.println("");
- }
- pw.print("\n- mute affected streams = 0x");
- pw.println(Integer.toHexString(mMuteAffectedStreams));
- }
-
- /** @hide */
- public static String streamToString(int stream) {
- if (stream >= 0 && stream < STREAM_NAMES.length) return STREAM_NAMES[stream];
- if (stream == AudioManager.USE_DEFAULT_STREAM_TYPE) return "USE_DEFAULT_STREAM_TYPE";
- return "UNKNOWN_STREAM_" + stream;
- }
-
    /**
     * Selects the platform's stream alias table and rewires the DTMF alias:
     * RING on voice platforms, MUSIC otherwise, VOICE_CALL while in communication.
     * Also adjusts which streams the ringer mode affects.
     *
     * @param updateVolumes true to propagate the new aliasing to stream states and
     *                      ringer mode; false during early init before stream states exist.
     */
    private void updateStreamVolumeAlias(boolean updateVolumes) {
        int dtmfStreamAlias;

        switch (mPlatformType) {
            case PLATFORM_VOICE:
                mStreamVolumeAlias = STREAM_VOLUME_ALIAS_VOICE;
                dtmfStreamAlias = AudioSystem.STREAM_RING;
                break;
            case PLATFORM_TELEVISION:
                mStreamVolumeAlias = STREAM_VOLUME_ALIAS_TELEVISION;
                dtmfStreamAlias = AudioSystem.STREAM_MUSIC;
                break;
            default:
                mStreamVolumeAlias = STREAM_VOLUME_ALIAS_DEFAULT;
                dtmfStreamAlias = AudioSystem.STREAM_MUSIC;
        }

        if (isPlatformTelevision()) {
            // TVs: no stream is silenced by the ringer mode.
            mRingerModeAffectedStreams = 0;
        } else {
            if (isInCommunication()) {
                // During a call, DTMF tones follow the voice call stream and must
                // not be silenced by the ringer mode.
                dtmfStreamAlias = AudioSystem.STREAM_VOICE_CALL;
                mRingerModeAffectedStreams &= ~(1 << AudioSystem.STREAM_DTMF);
            } else {
                mRingerModeAffectedStreams |= (1 << AudioSystem.STREAM_DTMF);
            }
        }

        mStreamVolumeAlias[AudioSystem.STREAM_DTMF] = dtmfStreamAlias;
        if (updateVolumes) {
            mStreamStates[AudioSystem.STREAM_DTMF].setAllIndexes(mStreamStates[dtmfStreamAlias]);
            // apply stream mute states according to new value of mRingerModeAffectedStreams
            setRingerModeInt(getRingerModeInternal(), false);
            sendMsg(mAudioHandler,
                    MSG_SET_ALL_VOLUMES,
                    SENDMSG_QUEUE,
                    0,
                    0,
                    mStreamStates[AudioSystem.STREAM_DTMF], 0);
        }
    }
-
    /**
     * Reads the "audio over dock" global setting, updates the set of devices that
     * trigger the "becoming noisy" intent accordingly, and asynchronously applies
     * the matching force-use policy for the dock.
     */
    private void readDockAudioSettings(ContentResolver cr)
    {
        mDockAudioMediaEnabled = Settings.Global.getInt(
                cr, Settings.Global.DOCK_AUDIO_MEDIA_ENABLED, 0) == 1;

        if (mDockAudioMediaEnabled) {
            mBecomingNoisyIntentDevices |= AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET;
        } else {
            mBecomingNoisyIntentDevices &= ~AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET;
        }

        // Applied on the audio handler thread, not synchronously.
        sendMsg(mAudioHandler,
                MSG_SET_FORCE_USE,
                SENDMSG_QUEUE,
                AudioSystem.FOR_DOCK,
                mDockAudioMediaEnabled ?
                        AudioSystem.FORCE_ANALOG_DOCK : AudioSystem.FORCE_NONE,
                null,
                0);
    }
-
    /**
     * Loads ringer mode, vibrate, mute and master volume state from persisted
     * settings, sanitizing values that may have been restored from an incompatible
     * device, then broadcasts the resulting sticky state.
     */
    private void readPersistedSettings() {
        final ContentResolver cr = mContentResolver;

        int ringerModeFromSettings =
                Settings.Global.getInt(
                        cr, Settings.Global.MODE_RINGER, AudioManager.RINGER_MODE_NORMAL);
        int ringerMode = ringerModeFromSettings;
        // sanity check in case the settings are restored from a device with incompatible
        // ringer modes
        if (!isValidRingerMode(ringerMode)) {
            ringerMode = AudioManager.RINGER_MODE_NORMAL;
        }
        if ((ringerMode == AudioManager.RINGER_MODE_VIBRATE) && !mHasVibrator) {
            ringerMode = AudioManager.RINGER_MODE_SILENT;
        }
        if (ringerMode != ringerModeFromSettings) {
            // Persist the sanitized value so subsequent reads are consistent.
            Settings.Global.putInt(cr, Settings.Global.MODE_RINGER, ringerMode);
        }
        if (mUseFixedVolume || isPlatformTelevision()) {
            ringerMode = AudioManager.RINGER_MODE_NORMAL;
        }
        synchronized(mSettingsLock) {
            mRingerMode = ringerMode;
            if (mRingerModeExternal == -1) {
                mRingerModeExternal = mRingerMode;
            }

            // System.VIBRATE_ON is not used any more but defaults for mVibrateSetting
            // are still needed while setVibrateSetting() and getVibrateSetting() are being
            // deprecated.
            mVibrateSetting = getValueForVibrateSetting(0,
                    AudioManager.VIBRATE_TYPE_NOTIFICATION,
                    mHasVibrator ? AudioManager.VIBRATE_SETTING_ONLY_SILENT
                            : AudioManager.VIBRATE_SETTING_OFF);
            mVibrateSetting = getValueForVibrateSetting(mVibrateSetting,
                    AudioManager.VIBRATE_TYPE_RINGER,
                    mHasVibrator ? AudioManager.VIBRATE_SETTING_ONLY_SILENT
                            : AudioManager.VIBRATE_SETTING_OFF);

            updateRingerModeAffectedStreams();
            readDockAudioSettings(cr);
        }

        // Default: music, ring and system streams are mute-affected.
        mMuteAffectedStreams = System.getIntForUser(cr,
                System.MUTE_STREAMS_AFFECTED,
                ((1 << AudioSystem.STREAM_MUSIC)|
                 (1 << AudioSystem.STREAM_RING)|
                 (1 << AudioSystem.STREAM_SYSTEM)),
                UserHandle.USER_CURRENT);

        boolean masterMute = System.getIntForUser(cr, System.VOLUME_MASTER_MUTE,
                0, UserHandle.USER_CURRENT) == 1;
        if (mUseFixedVolume) {
            // Fixed-volume devices are never master-muted and always at full volume.
            masterMute = false;
            AudioSystem.setMasterVolume(1.0f);
        }
        AudioSystem.setMasterMute(masterMute);
        broadcastMasterMuteStatus(masterMute);

        boolean microphoneMute =
                System.getIntForUser(cr, System.MICROPHONE_MUTE, 0, UserHandle.USER_CURRENT) == 1;
        AudioSystem.muteMicrophone(microphoneMute);

        // Each stream will read its own persisted settings

        // Broadcast the sticky intents
        broadcastRingerMode(AudioManager.RINGER_MODE_CHANGED_ACTION, mRingerModeExternal);
        broadcastRingerMode(AudioManager.INTERNAL_RINGER_MODE_CHANGED_ACTION, mRingerMode);

        // Broadcast vibrate settings
        broadcastVibrateSetting(AudioManager.VIBRATE_TYPE_RINGER);
        broadcastVibrateSetting(AudioManager.VIBRATE_TYPE_NOTIFICATION);

        // Load settings for the volume controller
        mVolumeController.loadSettings(cr);
    }
-
- private int rescaleIndex(int index, int srcStream, int dstStream) {
- return (index * mStreamStates[dstStream].getMaxIndex() + mStreamStates[srcStream].getMaxIndex() / 2) / mStreamStates[srcStream].getMaxIndex();
- }
-
    /**
     * Listens for physical orientation changes and pushes the current display
     * rotation to the audio system when it differs from the cached value.
     */
    private class AudioOrientationEventListener
            extends OrientationEventListener {
        public AudioOrientationEventListener(Context context) {
            super(context);
        }

        @Override
        public void onOrientationChanged(int orientation) {
            //Even though we're responding to phone orientation events,
            //use display rotation so audio stays in sync with video/dialogs
            int newRotation = ((WindowManager) mContext.getSystemService(
                    Context.WINDOW_SERVICE)).getDefaultDisplay().getRotation();
            if (newRotation != mDeviceRotation) {
                mDeviceRotation = newRotation;
                setRotationForAudioSystem();
            }
        }
    }
-
- ///////////////////////////////////////////////////////////////////////////
- // IPC methods
- ///////////////////////////////////////////////////////////////////////////
    /** @see AudioManager#adjustVolume(int, int) */
    public void adjustSuggestedStreamVolume(int direction, int suggestedStreamType, int flags,
            String callingPackage) {
        // Binder entry point: capture the caller's uid here for app-ops attribution.
        adjustSuggestedStreamVolume(direction, suggestedStreamType, flags, callingPackage,
                Binder.getCallingUid());
    }
-
    /**
     * Resolves the stream to adjust (forced control stream if set, otherwise the
     * active stream for the suggestion), filters sound/vibrate flags, lets the
     * volume controller UI optionally suppress the adjustment, then delegates to
     * adjustStreamVolume().
     */
    private void adjustSuggestedStreamVolume(int direction, int suggestedStreamType, int flags,
            String callingPackage, int uid) {
        if (DEBUG_VOL) Log.d(TAG, "adjustSuggestedStreamVolume() stream="+suggestedStreamType
                + ", flags=" + flags);
        int streamType;
        if (mVolumeControlStream != -1) {
            // A client forced volume control to a specific stream.
            streamType = mVolumeControlStream;
        } else {
            streamType = getActiveStreamType(suggestedStreamType);
        }
        final int resolvedStream = mStreamVolumeAlias[streamType];

        // Play sounds on STREAM_RING only.
        if ((flags & AudioManager.FLAG_PLAY_SOUND) != 0 &&
                resolvedStream != AudioSystem.STREAM_RING) {
            flags &= ~AudioManager.FLAG_PLAY_SOUND;
        }

        // For notifications/ring, show the ui before making any adjustments
        if (mVolumeController.suppressAdjustment(resolvedStream, flags)) {
            // Suppressed: still call through with direction 0 so the UI updates.
            direction = 0;
            flags &= ~AudioManager.FLAG_PLAY_SOUND;
            flags &= ~AudioManager.FLAG_VIBRATE;
            if (DEBUG_VOL) Log.d(TAG, "Volume controller suppressed adjustment");
        }

        adjustStreamVolume(streamType, direction, flags, callingPackage, uid);
    }
-
    /** @see AudioManager#adjustStreamVolume(int, int, int) */
    public void adjustStreamVolume(int streamType, int direction, int flags,
            String callingPackage) {
        // Binder entry point: capture the caller's uid here for app-ops attribution.
        adjustStreamVolume(streamType, direction, flags, callingPackage, Binder.getCallingUid());
    }
-
    /**
     * Core stream-volume adjustment. Resolves the stream alias and target device,
     * honors fixed-volume and safe-media-volume constraints, may hand the change
     * over to a ringer-mode transition, and mirrors the result to A2DP (AVRCP
     * absolute volume) and HDMI-CEC sinks where applicable.
     *
     * @param uid caller's uid, checked against app-ops before any change.
     */
    private void adjustStreamVolume(int streamType, int direction, int flags,
            String callingPackage, int uid) {
        if (mUseFixedVolume) {
            // Volume is immutable on fixed-volume devices.
            return;
        }
        if (DEBUG_VOL) Log.d(TAG, "adjustStreamVolume() stream="+streamType+", dir="+direction
                + ", flags="+flags);

        ensureValidDirection(direction);
        ensureValidStreamType(streamType);

        // use stream type alias here so that streams with same alias have the same behavior,
        // including with regard to silent mode control (e.g the use of STREAM_RING below and in
        // checkForRingerModeChange() in place of STREAM_RING or STREAM_NOTIFICATION)
        int streamTypeAlias = mStreamVolumeAlias[streamType];
        VolumeStreamState streamState = mStreamStates[streamTypeAlias];

        final int device = getDeviceForStream(streamTypeAlias);

        int aliasIndex = streamState.getIndex(device);
        boolean adjustVolume = true;
        int step;

        // skip a2dp absolute volume control request when the device
        // is not an a2dp device
        if ((device & AudioSystem.DEVICE_OUT_ALL_A2DP) == 0 &&
            (flags & AudioManager.FLAG_BLUETOOTH_ABS_VOLUME) != 0) {
            return;
        }

        // NOTE(review): "STEAM_VOLUME_OPS" (declared elsewhere in this file) looks
        // like a typo of STREAM_VOLUME_OPS; renaming would touch all call sites.
        if (mAppOps.noteOp(STEAM_VOLUME_OPS[streamTypeAlias], uid, callingPackage)
                != AppOpsManager.MODE_ALLOWED) {
            return;
        }

        // reset any pending volume command
        synchronized (mSafeMediaVolumeState) {
            mPendingVolumeCommand = null;
        }

        flags &= ~AudioManager.FLAG_FIXED_VOLUME;
        if ((streamTypeAlias == AudioSystem.STREAM_MUSIC) &&
                ((device & mFixedVolumeDevices) != 0)) {
            flags |= AudioManager.FLAG_FIXED_VOLUME;

            // Always toggle between max safe volume and 0 for fixed volume devices where safe
            // volume is enforced, and max and 0 for the others.
            // This is simulated by stepping by the full allowed volume range
            if (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_ACTIVE &&
                    (device & mSafeMediaVolumeDevices) != 0) {
                step = mSafeMediaVolumeIndex;
            } else {
                step = streamState.getMaxIndex();
            }
            if (aliasIndex != 0) {
                aliasIndex = step;
            }
        } else {
            // convert one UI step (+/-1) into a number of internal units on the stream alias
            step = rescaleIndex(10, streamType, streamTypeAlias);
        }

        // If either the client forces allowing ringer modes for this adjustment,
        // or the stream type is one that is affected by ringer modes
        if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
                (streamTypeAlias == getMasterStreamType())) {
            int ringerMode = getRingerModeInternal();
            // do not vibrate if already in vibrate mode
            if (ringerMode == AudioManager.RINGER_MODE_VIBRATE) {
                flags &= ~AudioManager.FLAG_VIBRATE;
            }
            // Check if the ringer mode changes with this volume adjustment. If
            // it does, it will handle adjusting the volume, so we won't below
            final int result = checkForRingerModeChange(aliasIndex, direction, step);
            adjustVolume = (result & FLAG_ADJUST_VOLUME) != 0;
            // If suppressing a volume adjustment in silent mode, display the UI hint
            if ((result & AudioManager.FLAG_SHOW_SILENT_HINT) != 0) {
                flags |= AudioManager.FLAG_SHOW_SILENT_HINT;
            }
            // If suppressing a volume down adjustment in vibrate mode, display the UI hint
            if ((result & AudioManager.FLAG_SHOW_VIBRATE_HINT) != 0) {
                flags |= AudioManager.FLAG_SHOW_VIBRATE_HINT;
            }
        }

        int oldIndex = mStreamStates[streamType].getIndex(device);

        if (adjustVolume && (direction != AudioManager.ADJUST_SAME)) {

            // Check if volume update should be send to AVRCP
            if (streamTypeAlias == AudioSystem.STREAM_MUSIC &&
                    (device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0 &&
                    (flags & AudioManager.FLAG_BLUETOOTH_ABS_VOLUME) == 0) {
                synchronized (mA2dpAvrcpLock) {
                    if (mA2dp != null && mAvrcpAbsVolSupported) {
                        mA2dp.adjustAvrcpAbsoluteVolume(direction);
                    }
                }
            }

            if ((direction == AudioManager.ADJUST_RAISE) &&
                    !checkSafeMediaVolume(streamTypeAlias, aliasIndex + step, device)) {
                // Raising past the safe limit: show the warning instead of changing.
                Log.e(TAG, "adjustStreamVolume() safe volume index = "+oldIndex);
                mVolumeController.postDisplaySafeVolumeWarning(flags);
            } else if (streamState.adjustIndex(direction * step, device)) {
                // Post message to set system volume (it in turn will post a message
                // to persist). Do not change volume if stream is muted.
                sendMsg(mAudioHandler,
                        MSG_SET_DEVICE_VOLUME,
                        SENDMSG_QUEUE,
                        device,
                        0,
                        streamState,
                        0);
            }

            // Check if volume update should be send to Hdmi system audio.
            int newIndex = mStreamStates[streamType].getIndex(device);
            if (streamTypeAlias == AudioSystem.STREAM_MUSIC) {
                setSystemAudioVolume(oldIndex, newIndex, getStreamMaxVolume(streamType), flags);
            }
            if (mHdmiManager != null) {
                synchronized (mHdmiManager) {
                    // mHdmiCecSink true => mHdmiPlaybackClient != null
                    if (mHdmiCecSink &&
                            streamTypeAlias == AudioSystem.STREAM_MUSIC &&
                            oldIndex != newIndex) {
                        synchronized (mHdmiPlaybackClient) {
                            int keyCode = (direction == -1) ? KeyEvent.KEYCODE_VOLUME_DOWN :
                                    KeyEvent.KEYCODE_VOLUME_UP;
                            // Mirror the adjustment as a CEC key press/release pair.
                            mHdmiPlaybackClient.sendKeyEvent(keyCode, true);
                            mHdmiPlaybackClient.sendKeyEvent(keyCode, false);
                        }
                    }
                }
            }
        }
        int index = mStreamStates[streamType].getIndex(device);
        sendVolumeUpdate(streamType, oldIndex, index, flags);
    }
-
    /**
     * Forwards a music-volume change to the AVR over HDMI-CEC when system audio
     * mode is active. No-ops when there is no TV client, the volume is unchanged,
     * or the change itself carries FLAG_HDMI_SYSTEM_AUDIO_VOLUME (presumably
     * because it originated from the AVR — confirm).
     */
    private void setSystemAudioVolume(int oldVolume, int newVolume, int maxVolume, int flags) {
        if (mHdmiManager == null
                || mHdmiTvClient == null
                || oldVolume == newVolume
                || (flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) != 0) return;

        // Sets the audio volume of AVR when we are in system audio mode. The new volume info
        // is tranformed to HDMI-CEC commands and passed through CEC bus.
        synchronized (mHdmiManager) {
            if (!mHdmiSystemAudioSupported) return;
            synchronized (mHdmiTvClient) {
                final long token = Binder.clearCallingIdentity();
                try {
                    // Internal indexes are in tenths; convert to UI units with rounding.
                    mHdmiTvClient.setSystemAudioVolume(
                            (oldVolume + 5) / 10, (newVolume + 5) / 10, maxVolume);
                } finally {
                    Binder.restoreCallingIdentity(token);
                }
            }
        }
    }
-
    /** @see AudioManager#adjustMasterVolume(int, int) */
    public void adjustMasterVolume(int steps, int flags, String callingPackage) {
        // Binder entry point: capture the caller's uid here for app-ops attribution.
        adjustMasterVolume(steps, flags, callingPackage, Binder.getCallingUid());
    }
-
- public void adjustMasterVolume(int steps, int flags, String callingPackage, int uid) {
- if (mUseFixedVolume) {
- return;
- }
- ensureValidSteps(steps);
- int volume = Math.round(AudioSystem.getMasterVolume() * MAX_MASTER_VOLUME);
- int delta = 0;
- int numSteps = Math.abs(steps);
- int direction = steps > 0 ? AudioManager.ADJUST_RAISE : AudioManager.ADJUST_LOWER;
- for (int i = 0; i < numSteps; ++i) {
- delta = findVolumeDelta(direction, volume);
- volume += delta;
- }
-
- //Log.d(TAG, "adjustMasterVolume volume: " + volume + " steps: " + steps);
- setMasterVolume(volume, flags, callingPackage, uid);
- }
-
- // StreamVolumeCommand contains the information needed to defer the process of
- // setStreamVolume() in case the user has to acknowledge the safe volume warning message.
- class StreamVolumeCommand {
- public final int mStreamType;
- public final int mIndex;
- public final int mFlags;
- public final int mDevice;
-
- StreamVolumeCommand(int streamType, int index, int flags, int device) {
- mStreamType = streamType;
- mIndex = index;
- mFlags = flags;
- mDevice = device;
- }
-
- @Override
- public String toString() {
- return new StringBuilder().append("{streamType=").append(mStreamType).append(",index=")
- .append(mIndex).append(",flags=").append(mFlags).append(",device=")
- .append(mDevice).append('}').toString();
- }
- };
-
    /**
     * Applies a (possibly deferred) volume change to the stream's alias and, for
     * ringer-mode-controlling streams, derives the new ringer mode: index 0 maps
     * to vibrate/silent, anything else back to normal.
     */
    private void onSetStreamVolume(int streamType, int index, int flags, int device) {
        setStreamVolumeInt(mStreamVolumeAlias[streamType], index, device, false);
        // setting volume on master stream type also controls silent mode
        if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
                (mStreamVolumeAlias[streamType] == getMasterStreamType())) {
            int newRingerMode;
            if (index == 0) {
                // Prefer vibrate when available; otherwise silent only if the
                // platform lets volume-zero enter silent mode.
                newRingerMode = mHasVibrator ? AudioManager.RINGER_MODE_VIBRATE
                        : VOLUME_SETS_RINGER_MODE_SILENT ? AudioManager.RINGER_MODE_SILENT
                        : AudioManager.RINGER_MODE_NORMAL;
            } else {
                newRingerMode = AudioManager.RINGER_MODE_NORMAL;
            }
            setRingerMode(newRingerMode, TAG + ".onSetStreamVolume", false /*external*/);
        }
    }
-
    /** @see AudioManager#setStreamVolume(int, int, int) */
    public void setStreamVolume(int streamType, int index, int flags, String callingPackage) {
        // Binder entry point: capture the caller's uid here for app-ops attribution.
        setStreamVolume(streamType, index, flags, callingPackage, Binder.getCallingUid());
    }
-
    /**
     * Sets a stream's volume to an absolute UI index. Rescales the index into the
     * alias stream's units, mirrors to AVRCP absolute volume and HDMI system
     * audio, clamps for fixed-volume devices, and defers the change behind the
     * safe-volume warning when needed.
     */
    private void setStreamVolume(int streamType, int index, int flags, String callingPackage,
            int uid) {
        if (mUseFixedVolume) {
            return;
        }

        ensureValidStreamType(streamType);
        int streamTypeAlias = mStreamVolumeAlias[streamType];
        VolumeStreamState streamState = mStreamStates[streamTypeAlias];

        final int device = getDeviceForStream(streamType);
        int oldIndex;

        // skip a2dp absolute volume control request when the device
        // is not an a2dp device
        if ((device & AudioSystem.DEVICE_OUT_ALL_A2DP) == 0 &&
                (flags & AudioManager.FLAG_BLUETOOTH_ABS_VOLUME) != 0) {
            return;
        }

        if (mAppOps.noteOp(STEAM_VOLUME_OPS[streamTypeAlias], uid, callingPackage)
                != AppOpsManager.MODE_ALLOWED) {
            return;
        }

        synchronized (mSafeMediaVolumeState) {
            // reset any pending volume command
            mPendingVolumeCommand = null;

            oldIndex = streamState.getIndex(device);

            // UI index -> internal tenths, rescaled onto the alias stream's range.
            index = rescaleIndex(index * 10, streamType, streamTypeAlias);

            if (streamTypeAlias == AudioSystem.STREAM_MUSIC &&
                    (device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0 &&
                    (flags & AudioManager.FLAG_BLUETOOTH_ABS_VOLUME) == 0) {
                synchronized (mA2dpAvrcpLock) {
                    if (mA2dp != null && mAvrcpAbsVolSupported) {
                        mA2dp.setAvrcpAbsoluteVolume(index / 10);
                    }
                }
            }

            if (streamTypeAlias == AudioSystem.STREAM_MUSIC) {
                setSystemAudioVolume(oldIndex, index, getStreamMaxVolume(streamType), flags);
            }

            flags &= ~AudioManager.FLAG_FIXED_VOLUME;
            if ((streamTypeAlias == AudioSystem.STREAM_MUSIC) &&
                    ((device & mFixedVolumeDevices) != 0)) {
                flags |= AudioManager.FLAG_FIXED_VOLUME;

                // volume is either 0 or max allowed for fixed volume devices
                if (index != 0) {
                    if (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_ACTIVE &&
                            (device & mSafeMediaVolumeDevices) != 0) {
                        index = mSafeMediaVolumeIndex;
                    } else {
                        index = streamState.getMaxIndex();
                    }
                }
            }

            if (!checkSafeMediaVolume(streamTypeAlias, index, device)) {
                // Above the safe limit: defer until the user acknowledges the warning.
                mVolumeController.postDisplaySafeVolumeWarning(flags);
                mPendingVolumeCommand = new StreamVolumeCommand(
                        streamType, index, flags, device);
            } else {
                onSetStreamVolume(streamType, index, flags, device);
                index = mStreamStates[streamType].getIndex(device);
            }
        }
        sendVolumeUpdate(streamType, oldIndex, index, flags);
    }
-
    /** @see AudioManager#forceVolumeControlStream(int)
     *  Forces subsequent volume-key adjustments to the given stream; -1 releases
     *  the forcing. The client binder is tracked so a dead client auto-releases. */
    public void forceVolumeControlStream(int streamType, IBinder cb) {
        synchronized(mForceControlStreamLock) {
            mVolumeControlStream = streamType;
            if (mVolumeControlStream == -1) {
                if (mForceControlStreamClient != null) {
                    mForceControlStreamClient.release();
                    mForceControlStreamClient = null;
                }
            } else {
                // NOTE: any previous client is replaced without an explicit release.
                mForceControlStreamClient = new ForceControlStreamClient(cb);
            }
        }
    }
-
- private class ForceControlStreamClient implements IBinder.DeathRecipient {
- private IBinder mCb; // To be notified of client's death
-
- ForceControlStreamClient(IBinder cb) {
- if (cb != null) {
- try {
- cb.linkToDeath(this, 0);
- } catch (RemoteException e) {
- // Client has died!
- Log.w(TAG, "ForceControlStreamClient() could not link to "+cb+" binder death");
- cb = null;
- }
- }
- mCb = cb;
- }
-
- public void binderDied() {
- synchronized(mForceControlStreamLock) {
- Log.w(TAG, "SCO client died");
- if (mForceControlStreamClient != this) {
- Log.w(TAG, "unregistered control stream client died");
- } else {
- mForceControlStreamClient = null;
- mVolumeControlStream = -1;
- }
- }
- }
-
- public void release() {
- if (mCb != null) {
- mCb.unlinkToDeath(this, 0);
- mCb = null;
- }
- }
- }
-
- private int findVolumeDelta(int direction, int volume) {
- int delta = 0;
- if (direction == AudioManager.ADJUST_RAISE) {
- if (volume == MAX_MASTER_VOLUME) {
- return 0;
- }
- // This is the default value if we make it to the end
- delta = mMasterVolumeRamp[1];
- // If we're raising the volume move down the ramp array until we
- // find the volume we're above and use that groups delta.
- for (int i = mMasterVolumeRamp.length - 1; i > 1; i -= 2) {
- if (volume >= mMasterVolumeRamp[i - 1]) {
- delta = mMasterVolumeRamp[i];
- break;
- }
- }
- } else if (direction == AudioManager.ADJUST_LOWER){
- if (volume == 0) {
- return 0;
- }
- int length = mMasterVolumeRamp.length;
- // This is the default value if we make it to the end
- delta = -mMasterVolumeRamp[length - 1];
- // If we're lowering the volume move up the ramp array until we
- // find the volume we're below and use the group below it's delta
- for (int i = 2; i < length; i += 2) {
- if (volume <= mMasterVolumeRamp[i]) {
- delta = -mMasterVolumeRamp[i - 1];
- break;
- }
- }
- }
- return delta;
- }
-
    /**
     * Sends the intent to all users as the system, temporarily clearing the
     * caller's binder identity so the broadcast carries system privileges.
     */
    private void sendBroadcastToAll(Intent intent) {
        intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT);
        final long ident = Binder.clearCallingIdentity();
        try {
            mContext.sendBroadcastAsUser(intent, UserHandle.ALL);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }
-
    /**
     * Sends a sticky intent to all users as the system, temporarily clearing the
     * caller's binder identity so the broadcast carries system privileges.
     */
    private void sendStickyBroadcastToAll(Intent intent) {
        final long ident = Binder.clearCallingIdentity();
        try {
            mContext.sendStickyBroadcastAsUser(intent, UserHandle.ALL);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }
-
    // UI update and Broadcast Intent
    /**
     * Notifies the volume controller UI and, for non-fixed-volume changes,
     * broadcasts VOLUME_CHANGED_ACTION with UI-scale (tenths-rounded) indexes.
     */
    private void sendVolumeUpdate(int streamType, int oldIndex, int index, int flags) {
        // Non-voice platforms surface ring changes as notification changes.
        if (!isPlatformVoice() && (streamType == AudioSystem.STREAM_RING)) {
            streamType = AudioSystem.STREAM_NOTIFICATION;
        }

        if (streamType == AudioSystem.STREAM_MUSIC) {
            flags = updateFlagsForSystemAudio(flags);
        }
        mVolumeController.postVolumeChanged(streamType, flags);

        if ((flags & AudioManager.FLAG_FIXED_VOLUME) == 0) {
            // Convert internal tenths to UI units with rounding.
            oldIndex = (oldIndex + 5) / 10;
            index = (index + 5) / 10;
            Intent intent = new Intent(AudioManager.VOLUME_CHANGED_ACTION);
            intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_TYPE, streamType);
            intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_VALUE, index);
            intent.putExtra(AudioManager.EXTRA_PREV_VOLUME_STREAM_VALUE, oldIndex);
            sendBroadcastToAll(intent);
        }
    }
-
    // If Hdmi-CEC system audio mode is on, we show volume bar only when TV
    // receives volume notification from Audio Receiver.
    private int updateFlagsForSystemAudio(int flags) {
        if (mHdmiTvClient != null) {
            synchronized (mHdmiTvClient) {
                // Strip FLAG_SHOW_UI unless the change came from the AVR itself
                // (marked with FLAG_HDMI_SYSTEM_AUDIO_VOLUME).
                if (mHdmiSystemAudioSupported &&
                        ((flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) == 0)) {
                    flags &= ~AudioManager.FLAG_SHOW_UI;
                }
            }
        }
        return flags;
    }
-
    // UI update and Broadcast Intent
    /** Notifies the volume controller and broadcasts the master volume change. */
    private void sendMasterVolumeUpdate(int flags, int oldVolume, int newVolume) {
        mVolumeController.postMasterVolumeChanged(updateFlagsForSystemAudio(flags));

        Intent intent = new Intent(AudioManager.MASTER_VOLUME_CHANGED_ACTION);
        intent.putExtra(AudioManager.EXTRA_PREV_MASTER_VOLUME_VALUE, oldVolume);
        intent.putExtra(AudioManager.EXTRA_MASTER_VOLUME_VALUE, newVolume);
        sendBroadcastToAll(intent);
    }
-
    // UI update and Broadcast Intent
    /** Notifies the volume controller and broadcasts the master mute change. */
    private void sendMasterMuteUpdate(boolean muted, int flags) {
        mVolumeController.postMasterMuteChanged(updateFlagsForSystemAudio(flags));
        broadcastMasterMuteStatus(muted);
    }
-
    /** Broadcasts MASTER_MUTE_CHANGED_ACTION as a sticky intent to all users. */
    private void broadcastMasterMuteStatus(boolean muted) {
        Intent intent = new Intent(AudioManager.MASTER_MUTE_CHANGED_ACTION);
        intent.putExtra(AudioManager.EXTRA_MASTER_VOLUME_MUTED, muted);
        // Replace any pending copy so only the latest state is delivered.
        intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT
                | Intent.FLAG_RECEIVER_REPLACE_PENDING);
        sendStickyBroadcastToAll(intent);
    }
-
    /**
     * Sets the stream state's index, and posts a message to set system volume.
     * This will not call out to the UI. Assumes a valid stream type.
     *
     * @param streamType Type of the stream
     * @param index Desired volume index of the stream
     * @param device the device whose volume must be changed
     * @param force If true, set the volume even if the desired volume is same
     * as the current volume.
     */
    private void setStreamVolumeInt(int streamType,
                                    int index,
                                    int device,
                                    boolean force) {
        VolumeStreamState streamState = mStreamStates[streamType];

        // Note: setIndex() is always evaluated; `force` only matters when the
        // index did not change.
        if (streamState.setIndex(index, device) || force) {
            // Post message to set system volume (it in turn will post a message
            // to persist).
            sendMsg(mAudioHandler,
                    MSG_SET_DEVICE_VOLUME,
                    SENDMSG_QUEUE,
                    device,
                    0,
                    streamState,
                    0);
        }
    }
-
    /** @see AudioManager#setStreamSolo(int, boolean)
     *  "Solo" is implemented by muting/unmuting every stream whose alias differs
     *  from the given stream's alias; the solo stream itself is left untouched. */
    public void setStreamSolo(int streamType, boolean state, IBinder cb) {
        if (mUseFixedVolume) {
            return;
        }
        int streamAlias = mStreamVolumeAlias[streamType];
        for (int stream = 0; stream < mStreamStates.length; stream++) {
            // Skip streams that share the solo stream's alias or are not mute-affected.
            if (!isStreamAffectedByMute(streamAlias) || streamAlias == mStreamVolumeAlias[stream]) {
                continue;
            }
            mStreamStates[stream].mute(cb, state);
        }
    }
-
    /** @see AudioManager#setStreamMute(int, boolean)
     *  Mutes/unmutes the stream and every stream sharing its alias, mirroring
     *  music mutes to HDMI system audio and broadcasting each change. */
    public void setStreamMute(int streamType, boolean state, IBinder cb) {
        if (mUseFixedVolume) {
            return;
        }
        if (streamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
            streamType = getActiveStreamType(streamType);
        }
        int streamAlias = mStreamVolumeAlias[streamType];
        if (isStreamAffectedByMute(streamAlias)) {
            if (streamAlias == AudioSystem.STREAM_MUSIC) {
                setSystemAudioMute(state);
            }
            // Apply to every stream that shares the alias so they stay in sync.
            for (int stream = 0; stream < mStreamStates.length; stream++) {
                if (streamAlias == mStreamVolumeAlias[stream]) {
                    mStreamStates[stream].mute(cb, state);

                    Intent intent = new Intent(AudioManager.STREAM_MUTE_CHANGED_ACTION);
                    intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_TYPE, stream);
                    intent.putExtra(AudioManager.EXTRA_STREAM_VOLUME_MUTED, state);
                    sendBroadcastToAll(intent);
                }
            }
        }
    }
-
    /**
     * Forwards a mute state change to the AVR over HDMI-CEC when system audio
     * mode is active; no-op without a TV client or system audio support.
     */
    private void setSystemAudioMute(boolean state) {
        if (mHdmiManager == null || mHdmiTvClient == null) return;
        synchronized (mHdmiManager) {
            if (!mHdmiSystemAudioSupported) return;
            synchronized (mHdmiTvClient) {
                // Send as the system, not as the binder caller.
                final long token = Binder.clearCallingIdentity();
                try {
                    mHdmiTvClient.setSystemAudioMute(state);
                } finally {
                    Binder.restoreCallingIdentity(token);
                }
            }
        }
    }
-
    /** get stream mute state. */
    public boolean isStreamMute(int streamType) {
        if (streamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
            // Resolve the placeholder to the currently active stream.
            streamType = getActiveStreamType(streamType);
        }
        synchronized (VolumeStreamState.class) {
            return mStreamStates[streamType].isMuted_syncVSS();
        }
    }
-
    /**
     * Death handler for a client that forced the remote submix to full volume;
     * if the client dies, the full-volume request is automatically withdrawn.
     */
    private class RmtSbmxFullVolDeathHandler implements IBinder.DeathRecipient {
        private IBinder mICallback; // To be notified of client's death

        RmtSbmxFullVolDeathHandler(IBinder cb) {
            mICallback = cb;
            try {
                cb.linkToDeath(this, 0/*flags*/);
            } catch (RemoteException e) {
                Log.e(TAG, "can't link to death", e);
            }
        }

        /** Returns true if this handler tracks the given client binder. */
        boolean isHandlerFor(IBinder cb) {
            return mICallback.equals(cb);
        }

        /** Unregisters the death notification; call before discarding the handler. */
        void forget() {
            try {
                mICallback.unlinkToDeath(this, 0/*flags*/);
            } catch (NoSuchElementException e) {
                Log.e(TAG, "error unlinking to death", e);
            }
        }

        public void binderDied() {
            Log.w(TAG, "Recorder with remote submix at full volume died " + mICallback);
            // Withdraw the dead client's full-volume request.
            forceRemoteSubmixFullVolume(false, mICallback);
        }
    }
-
- /**
- * call must be synchronized on mRmtSbmxFullVolDeathHandlers
- * @return true if there is a registered death handler, false otherwise */
- private boolean discardRmtSbmxFullVolDeathHandlerFor(IBinder cb) {
- Iterator<RmtSbmxFullVolDeathHandler> it = mRmtSbmxFullVolDeathHandlers.iterator();
- while (it.hasNext()) {
- final RmtSbmxFullVolDeathHandler handler = it.next();
- if (handler.isHandlerFor(cb)) {
- handler.forget();
- mRmtSbmxFullVolDeathHandlers.remove(handler);
- return true;
- }
- }
- return false;
- }
-
- /** call synchronized on mRmtSbmxFullVolDeathHandlers */
- private boolean hasRmtSbmxFullVolDeathHandlerFor(IBinder cb) {
- Iterator<RmtSbmxFullVolDeathHandler> it = mRmtSbmxFullVolDeathHandlers.iterator();
- while (it.hasNext()) {
- if (it.next().isHandlerFor(cb)) {
- return true;
- }
- }
- return false;
- }
-
    // Number of currently-registered requests to force the remote submix to full
    // volume; guarded by mRmtSbmxFullVolDeathHandlers.
    private int mRmtSbmxFullVolRefCount = 0;
    // One death handler per client binder that requested full volume.
    private ArrayList<RmtSbmxFullVolDeathHandler> mRmtSbmxFullVolDeathHandlers =
            new ArrayList<RmtSbmxFullVolDeathHandler>();
-
    /**
     * Adds or removes a request (refcounted per client binder) to treat the
     * remote submix output as a full/fixed-volume device. Requires the
     * CAPTURE_AUDIO_OUTPUT permission; the request is auto-withdrawn if the
     * client dies.
     */
    public void forceRemoteSubmixFullVolume(boolean startForcing, IBinder cb) {
        if (cb == null) {
            return;
        }
        if ((PackageManager.PERMISSION_GRANTED != mContext.checkCallingOrSelfPermission(
                android.Manifest.permission.CAPTURE_AUDIO_OUTPUT))) {
            Log.w(TAG, "Trying to call forceRemoteSubmixFullVolume() without CAPTURE_AUDIO_OUTPUT");
            return;
        }
        synchronized(mRmtSbmxFullVolDeathHandlers) {
            boolean applyRequired = false;
            if (startForcing) {
                // Only the first distinct client flips the device masks.
                if (!hasRmtSbmxFullVolDeathHandlerFor(cb)) {
                    mRmtSbmxFullVolDeathHandlers.add(new RmtSbmxFullVolDeathHandler(cb));
                    if (mRmtSbmxFullVolRefCount == 0) {
                        mFullVolumeDevices |= AudioSystem.DEVICE_OUT_REMOTE_SUBMIX;
                        mFixedVolumeDevices |= AudioSystem.DEVICE_OUT_REMOTE_SUBMIX;
                        applyRequired = true;
                    }
                    mRmtSbmxFullVolRefCount++;
                }
            } else {
                // Only the last remaining client restores the device masks.
                if (discardRmtSbmxFullVolDeathHandlerFor(cb) && (mRmtSbmxFullVolRefCount > 0)) {
                    mRmtSbmxFullVolRefCount--;
                    if (mRmtSbmxFullVolRefCount == 0) {
                        mFullVolumeDevices &= ~AudioSystem.DEVICE_OUT_REMOTE_SUBMIX;
                        mFixedVolumeDevices &= ~AudioSystem.DEVICE_OUT_REMOTE_SUBMIX;
                        applyRequired = true;
                    }
                }
            }
            if (applyRequired) {
                // Assumes only STREAM_MUSIC going through DEVICE_OUT_REMOTE_SUBMIX
                checkAllFixedVolumeDevices(AudioSystem.STREAM_MUSIC);
                mStreamStates[AudioSystem.STREAM_MUSIC].applyAllVolumes();
            }
        }
    }
-
    /** @see AudioManager#setMasterMute(boolean, int) */
    public void setMasterMute(boolean state, int flags, String callingPackage, IBinder cb) {
        // Binder entry point: capture the caller's uid here for app-ops attribution.
        setMasterMuteInternal(state, flags, callingPackage, cb, Binder.getCallingUid());
    }
-
    /**
     * Applies a master mute change after app-ops checks: mirrors it to HDMI
     * system audio, persists it (delayed), and notifies UI and listeners.
     */
    private void setMasterMuteInternal(boolean state, int flags, String callingPackage, IBinder cb,
            int uid) {
        if (mUseFixedVolume) {
            return;
        }
        if (mAppOps.noteOp(AppOpsManager.OP_AUDIO_MASTER_VOLUME, uid, callingPackage)
                != AppOpsManager.MODE_ALLOWED) {
            return;
        }
        if (state != AudioSystem.getMasterMute()) {
            setSystemAudioMute(state);
            AudioSystem.setMasterMute(state);
            // Post a persist master volume msg
            sendMsg(mAudioHandler, MSG_PERSIST_MASTER_VOLUME_MUTE, SENDMSG_REPLACE, state ? 1
                    : 0, UserHandle.getCallingUserId(), null, PERSIST_DELAY);
            sendMasterMuteUpdate(state, flags);

            // NOTE(review): sendMasterMuteUpdate() already broadcasts a sticky
            // MASTER_MUTE_CHANGED_ACTION via broadcastMasterMuteStatus(); this
            // second, non-sticky broadcast looks redundant — confirm before removing.
            Intent intent = new Intent(AudioManager.MASTER_MUTE_CHANGED_ACTION);
            intent.putExtra(AudioManager.EXTRA_MASTER_VOLUME_MUTED, state);
            sendBroadcastToAll(intent);
        }
    }
-
    /** get master mute state. */
    public boolean isMasterMute() {
        return AudioSystem.getMasterMute();
    }
-
- protected static int getMaxStreamVolume(int streamType) {
- return MAX_STREAM_VOLUME[streamType];
- }
-
- public static int getDefaultStreamVolume(int streamType) {
- return DEFAULT_STREAM_VOLUME[streamType];
- }
-
    /** @see AudioManager#getStreamVolume(int) */
    public int getStreamVolume(int streamType) {
        ensureValidStreamType(streamType);
        int device = getDeviceForStream(streamType);
        // Stream state is guarded class-wide on the VolumeStreamState class
        // object, matching the *_syncVSS helper convention.
        synchronized (VolumeStreamState.class) {
            int index = mStreamStates[streamType].getIndex(device);

            // by convention getStreamVolume() returns 0 when a stream is muted.
            if (mStreamStates[streamType].isMuted_syncVSS()) {
                index = 0;
            }
            // Non-muted music-aliased streams on fixed-volume devices always
            // report full scale, regardless of the stored index.
            if (index != 0 && (mStreamVolumeAlias[streamType] == AudioSystem.STREAM_MUSIC) &&
                    (device & mFixedVolumeDevices) != 0) {
                index = mStreamStates[streamType].getMaxIndex();
            }
            // Internal indices are stored x10; round to the external step.
            return (index + 5) / 10;
        }
    }
-
- @Override
- public int getMasterVolume() {
- if (isMasterMute()) return 0;
- return getLastAudibleMasterVolume();
- }
-
- @Override
- public void setMasterVolume(int volume, int flags, String callingPackage) {
- setMasterVolume(volume, flags, callingPackage, Binder.getCallingUid());
- }
-
- public void setMasterVolume(int volume, int flags, String callingPackage, int uid) {
- if (mUseFixedVolume) {
- return;
- }
-
- if (mAppOps.noteOp(AppOpsManager.OP_AUDIO_MASTER_VOLUME, uid, callingPackage)
- != AppOpsManager.MODE_ALLOWED) {
- return;
- }
-
- if (volume < 0) {
- volume = 0;
- } else if (volume > MAX_MASTER_VOLUME) {
- volume = MAX_MASTER_VOLUME;
- }
- doSetMasterVolume((float)volume / MAX_MASTER_VOLUME, flags);
- }
-
    /**
     * Applies a master volume (0..1 fraction) to the native layer.
     * Silently ignored while master mute is engaged. Persistence and system
     * audio mirroring happen only when the effective step value moved; the
     * listener update is always sent.
     */
    private void doSetMasterVolume(float volume, int flags) {
        // don't allow changing master volume when muted
        if (!AudioSystem.getMasterMute()) {
            int oldVolume = getMasterVolume();
            AudioSystem.setMasterVolume(volume);

            int newVolume = getMasterVolume();
            if (newVolume != oldVolume) {
                // Post a persist master volume msg
                sendMsg(mAudioHandler, MSG_PERSIST_MASTER_VOLUME, SENDMSG_REPLACE,
                        Math.round(volume * (float)1000.0), 0, null, PERSIST_DELAY);
                // Keep system audio in step with the new level.
                setSystemAudioVolume(oldVolume, newVolume, getMasterMaxVolume(), flags);
            }
            // Send the volume update regardless whether there was a change.
            sendMasterVolumeUpdate(flags, oldVolume, newVolume);
        }
    }
-
- /** @see AudioManager#getStreamMaxVolume(int) */
- public int getStreamMaxVolume(int streamType) {
- ensureValidStreamType(streamType);
- return (mStreamStates[streamType].getMaxIndex() + 5) / 10;
- }
-
- public int getMasterMaxVolume() {
- return MAX_MASTER_VOLUME;
- }
-
- /** Get last audible volume before stream was muted. */
- public int getLastAudibleStreamVolume(int streamType) {
- ensureValidStreamType(streamType);
- int device = getDeviceForStream(streamType);
- return (mStreamStates[streamType].getIndex(device) + 5) / 10;
- }
-
- /** Get last audible master volume before it was muted. */
- public int getLastAudibleMasterVolume() {
- return Math.round(AudioSystem.getMasterVolume() * MAX_MASTER_VOLUME);
- }
-
- /** @see AudioManager#getMasterStreamType() */
- public int getMasterStreamType() {
- return mStreamVolumeAlias[AudioSystem.STREAM_SYSTEM];
- }
-
    /** @see AudioManager#setMicrophoneMute(boolean) */
    public void setMicrophoneMute(boolean on, String callingPackage) {
        // Requires both the MUTE_MICROPHONE app-op and audio settings permission.
        if (mAppOps.noteOp(AppOpsManager.OP_MUTE_MICROPHONE, Binder.getCallingUid(),
                callingPackage) != AppOpsManager.MODE_ALLOWED) {
            return;
        }
        if (!checkAudioSettingsPermission("setMicrophoneMute()")) {
            return;
        }

        AudioSystem.muteMicrophone(on);
        // Post a persist microphone msg.
        sendMsg(mAudioHandler, MSG_PERSIST_MICROPHONE_MUTE, SENDMSG_REPLACE, on ? 1
                : 0, UserHandle.getCallingUserId(), null, PERSIST_DELAY);
    }
-
- @Override
- public int getRingerModeExternal() {
- synchronized(mSettingsLock) {
- return mRingerModeExternal;
- }
- }
-
- @Override
- public int getRingerModeInternal() {
- synchronized(mSettingsLock) {
- return mRingerMode;
- }
- }
-
- private void ensureValidRingerMode(int ringerMode) {
- if (!isValidRingerMode(ringerMode)) {
- throw new IllegalArgumentException("Bad ringer mode " + ringerMode);
- }
- }
-
- /** @see AudioManager#isValidRingerMode(int) */
- public boolean isValidRingerMode(int ringerMode) {
- return ringerMode >= 0 && ringerMode <= AudioManager.RINGER_MODE_MAX;
- }
-
- public void setRingerModeExternal(int ringerMode, String caller) {
- setRingerMode(ringerMode, caller, true /*external*/);
- }
-
- public void setRingerModeInternal(int ringerMode, String caller) {
- enforceSelfOrSystemUI("setRingerModeInternal");
- setRingerMode(ringerMode, caller, false /*external*/);
- }
-
    /**
     * Core ringer mode setter shared by the external and internal entry points.
     * Degrades VIBRATE to SILENT on vibrator-less devices, then updates both
     * the internal and external modes under mSettingsLock, letting
     * mRingerModeDelegate translate between the two. No-op on fixed-volume or
     * TV platforms.
     */
    private void setRingerMode(int ringerMode, String caller, boolean external) {
        if (mUseFixedVolume || isPlatformTelevision()) {
            return;
        }
        if (caller == null || caller.length() == 0) {
            throw new IllegalArgumentException("Bad caller: " + caller);
        }
        ensureValidRingerMode(ringerMode);
        if ((ringerMode == AudioManager.RINGER_MODE_VIBRATE) && !mHasVibrator) {
            ringerMode = AudioManager.RINGER_MODE_SILENT;
        }
        // Drop the caller's identity so downstream work runs as the system.
        final long identity = Binder.clearCallingIdentity();
        try {
            synchronized (mSettingsLock) {
                final int ringerModeInternal = getRingerModeInternal();
                final int ringerModeExternal = getRingerModeExternal();
                if (external) {
                    // External path: set the app-visible mode first, then let
                    // the delegate decide the resulting internal mode.
                    setRingerModeExt(ringerMode);
                    if (mRingerModeDelegate != null) {
                        ringerMode = mRingerModeDelegate.onSetRingerModeExternal(ringerModeExternal,
                                ringerMode, caller, ringerModeInternal);
                    }
                    if (ringerMode != ringerModeInternal) {
                        setRingerModeInt(ringerMode, true /*persist*/);
                    }
                } else /*internal*/ {
                    // Internal path: apply internally first, then derive the
                    // app-visible mode via the delegate.
                    if (ringerMode != ringerModeInternal) {
                        setRingerModeInt(ringerMode, true /*persist*/);
                    }
                    if (mRingerModeDelegate != null) {
                        ringerMode = mRingerModeDelegate.onSetRingerModeInternal(ringerModeInternal,
                                ringerMode, caller, ringerModeExternal);
                    }
                    setRingerModeExt(ringerMode);
                }
            }
        } finally {
            Binder.restoreCallingIdentity(identity);
        }
    }
-
    /**
     * Updates the externally-visible ringer mode and, on change, sends the
     * sticky RINGER_MODE_CHANGED broadcast. The broadcast is deliberately sent
     * outside the settings lock.
     */
    private void setRingerModeExt(int ringerMode) {
        synchronized(mSettingsLock) {
            if (ringerMode == mRingerModeExternal) return;
            mRingerModeExternal = ringerMode;
        }
        // Send sticky broadcast
        broadcastRingerMode(AudioManager.RINGER_MODE_CHANGED_ACTION, ringerMode);
    }
-
    /**
     * Applies a ringer mode to stream mute state, then persists/broadcasts it.
     * Streams affected by ringer mode are muted for SILENT/VIBRATE and unmuted
     * otherwise; ring-aliased streams are bumped off index 0 on unmute.
     */
    private void setRingerModeInt(int ringerMode, boolean persist) {
        final boolean change;
        synchronized(mSettingsLock) {
            change = mRingerMode != ringerMode;
            mRingerMode = ringerMode;
        }

        // Mute stream if not previously muted by ringer mode and ringer mode
        // is not RINGER_MODE_NORMAL and stream is affected by ringer mode.
        // Unmute stream if previously muted by ringer mode and ringer mode
        // is RINGER_MODE_NORMAL or stream is not affected by ringer mode.
        int numStreamTypes = AudioSystem.getNumStreamTypes();
        final boolean ringerModeMute = ringerMode == AudioManager.RINGER_MODE_VIBRATE
                || ringerMode == AudioManager.RINGER_MODE_SILENT;
        for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
            final boolean isMuted = isStreamMutedByRingerMode(streamType);
            final boolean shouldMute = ringerModeMute && isStreamAffectedByRingerMode(streamType);
            if (isMuted == shouldMute) continue;
            if (!shouldMute) {
                // unmute
                // ring and notifications volume should never be 0 when not silenced
                // on voice capable devices or devices that support vibration
                if ((isPlatformVoice() || mHasVibrator) &&
                        mStreamVolumeAlias[streamType] == AudioSystem.STREAM_RING) {
                    synchronized (VolumeStreamState.class) {
                        // NOTE(review): raw Set/Iterator over mIndex; entries
                        // appear to map device -> index on the x10 internal
                        // scale, so 10 is one external step - confirm against
                        // VolumeStreamState.
                        Set set = mStreamStates[streamType].mIndex.entrySet();
                        Iterator i = set.iterator();
                        while (i.hasNext()) {
                            Map.Entry entry = (Map.Entry)i.next();
                            if ((Integer)entry.getValue() == 0) {
                                entry.setValue(10);
                            }
                        }
                    }
                }
                mStreamStates[streamType].mute(null, false);
                mRingerModeMutedStreams &= ~(1 << streamType);
            } else {
                // mute
                mStreamStates[streamType].mute(null, true);
                mRingerModeMutedStreams |= (1 << streamType);
            }
        }

        // Post a persist ringer mode msg
        if (persist) {
            sendMsg(mAudioHandler, MSG_PERSIST_RINGER_MODE,
                    SENDMSG_REPLACE, 0, 0, null, PERSIST_DELAY);
        }
        if (change) {
            // Send sticky broadcast
            broadcastRingerMode(AudioManager.INTERNAL_RINGER_MODE_CHANGED_ACTION, ringerMode);
        }
    }
-
- private void restoreMasterVolume() {
- if (mUseFixedVolume) {
- AudioSystem.setMasterVolume(1.0f);
- return;
- }
- if (mUseMasterVolume) {
- float volume = Settings.System.getFloatForUser(mContentResolver,
- Settings.System.VOLUME_MASTER, -1.0f, UserHandle.USER_CURRENT);
- if (volume >= 0.0f) {
- AudioSystem.setMasterVolume(volume);
- }
- }
- }
-
- /** @see AudioManager#shouldVibrate(int) */
- public boolean shouldVibrate(int vibrateType) {
- if (!mHasVibrator) return false;
-
- switch (getVibrateSetting(vibrateType)) {
-
- case AudioManager.VIBRATE_SETTING_ON:
- return getRingerModeExternal() != AudioManager.RINGER_MODE_SILENT;
-
- case AudioManager.VIBRATE_SETTING_ONLY_SILENT:
- return getRingerModeExternal() == AudioManager.RINGER_MODE_VIBRATE;
-
- case AudioManager.VIBRATE_SETTING_OFF:
- // return false, even for incoming calls
- return false;
-
- default:
- return false;
- }
- }
-
- /** @see AudioManager#getVibrateSetting(int) */
- public int getVibrateSetting(int vibrateType) {
- if (!mHasVibrator) return AudioManager.VIBRATE_SETTING_OFF;
- return (mVibrateSetting >> (vibrateType * 2)) & 3;
- }
-
- /** @see AudioManager#setVibrateSetting(int, int) */
- public void setVibrateSetting(int vibrateType, int vibrateSetting) {
-
- if (!mHasVibrator) return;
-
- mVibrateSetting = getValueForVibrateSetting(mVibrateSetting, vibrateType, vibrateSetting);
-
- // Broadcast change
- broadcastVibrateSetting(vibrateType);
-
- }
-
- /**
- * @see #setVibrateSetting(int, int)
- */
- public static int getValueForVibrateSetting(int existingValue, int vibrateType,
- int vibrateSetting) {
-
- // First clear the existing setting. Each vibrate type has two bits in
- // the value. Note '3' is '11' in binary.
- existingValue &= ~(3 << (vibrateType * 2));
-
- // Set into the old value
- existingValue |= (vibrateSetting & 3) << (vibrateType * 2);
-
- return existingValue;
- }
-
    /**
     * Tracks one client that called setMode(), so the mode can be reverted to
     * NORMAL if the client process dies while owning a non-NORMAL mode.
     */
    private class SetModeDeathHandler implements IBinder.DeathRecipient {
        private IBinder mCb; // To be notified of client's death
        private int mPid;
        private int mMode = AudioSystem.MODE_NORMAL; // Current mode set by this client

        SetModeDeathHandler(IBinder cb, int pid) {
            mCb = cb;
            mPid = pid;
        }

        public void binderDied() {
            int newModeOwnerPid = 0;
            synchronized(mSetModeDeathHandlers) {
                Log.w(TAG, "setMode() client died");
                int index = mSetModeDeathHandlers.indexOf(this);
                if (index < 0) {
                    Log.w(TAG, "unregistered setMode() client died");
                } else {
                    // Revert this client's mode; setModeInt() promotes the next
                    // handler on the stack if one exists.
                    newModeOwnerPid = setModeInt(AudioSystem.MODE_NORMAL, mCb, mPid);
                }
            }
            // when entering RINGTONE, IN_CALL or IN_COMMUNICATION mode, clear all
            // SCO connections not started by the application changing the mode
            if (newModeOwnerPid != 0) {
                final long ident = Binder.clearCallingIdentity();
                disconnectBluetoothSco(newModeOwnerPid);
                Binder.restoreCallingIdentity(ident);
            }
        }

        public int getPid() {
            return mPid;
        }

        public void setMode(int mode) {
            mMode = mode;
        }

        public int getMode() {
            return mMode;
        }

        public IBinder getBinder() {
            return mCb;
        }
    }
-
    /** @see AudioManager#setMode(int) */
    public void setMode(int mode, IBinder cb) {
        if (DEBUG_MODE) { Log.v(TAG, "setMode(mode=" + mode + ")"); }
        if (!checkAudioSettingsPermission("setMode()")) {
            return;
        }

        // MODE_IN_CALL additionally requires MODIFY_PHONE_STATE.
        if ( (mode == AudioSystem.MODE_IN_CALL) &&
                (mContext.checkCallingOrSelfPermission(
                        android.Manifest.permission.MODIFY_PHONE_STATE)
                            != PackageManager.PERMISSION_GRANTED)) {
            Log.w(TAG, "MODIFY_PHONE_STATE Permission Denial: setMode(MODE_IN_CALL) from pid="
                    + Binder.getCallingPid() + ", uid=" + Binder.getCallingUid());
            return;
        }

        if (mode < AudioSystem.MODE_CURRENT || mode >= AudioSystem.NUM_MODES) {
            return;
        }

        int newModeOwnerPid = 0;
        synchronized(mSetModeDeathHandlers) {
            if (mode == AudioSystem.MODE_CURRENT) {
                // MODE_CURRENT re-applies whatever mode is currently active.
                mode = mMode;
            }
            newModeOwnerPid = setModeInt(mode, cb, Binder.getCallingPid());
        }
        // when entering RINGTONE, IN_CALL or IN_COMMUNICATION mode, clear all
        // SCO connections not started by the application changing the mode
        if (newModeOwnerPid != 0) {
            disconnectBluetoothSco(newModeOwnerPid);
        }
    }
-
    // must be called synchronized on mSetModeDeathHandlers
    // setModeInt() returns a valid PID if the audio mode was successfully set to
    // any mode other than NORMAL.
    private int setModeInt(int mode, IBinder cb, int pid) {
        if (DEBUG_MODE) { Log.v(TAG, "setModeInt(mode=" + mode + ", pid=" + pid + ")"); }
        int newModeOwnerPid = 0;
        if (cb == null) {
            Log.e(TAG, "setModeInt() called with null binder");
            return newModeOwnerPid;
        }

        // Pull any existing handler for this pid off the stack (re-inserted at
        // the top below) and temporarily drop its death link.
        SetModeDeathHandler hdlr = null;
        Iterator iter = mSetModeDeathHandlers.iterator();
        while (iter.hasNext()) {
            SetModeDeathHandler h = (SetModeDeathHandler)iter.next();
            if (h.getPid() == pid) {
                hdlr = h;
                // Remove from client list so that it is re-inserted at top of list
                iter.remove();
                hdlr.getBinder().unlinkToDeath(hdlr, 0);
                break;
            }
        }
        int status = AudioSystem.AUDIO_STATUS_OK;
        // Retry loop: if the native layer rejects a mode, fall back to NORMAL
        // and re-evaluate against the remaining handler stack.
        do {
            if (mode == AudioSystem.MODE_NORMAL) {
                // get new mode from client at top the list if any
                if (!mSetModeDeathHandlers.isEmpty()) {
                    hdlr = mSetModeDeathHandlers.get(0);
                    cb = hdlr.getBinder();
                    mode = hdlr.getMode();
                    if (DEBUG_MODE) {
                        Log.w(TAG, " using mode=" + mode + " instead due to death hdlr at pid="
                                + hdlr.mPid);
                    }
                }
            } else {
                if (hdlr == null) {
                    hdlr = new SetModeDeathHandler(cb, pid);
                }
                // Register for client death notification
                try {
                    cb.linkToDeath(hdlr, 0);
                } catch (RemoteException e) {
                    // Client has died!
                    Log.w(TAG, "setMode() could not link to "+cb+" binder death");
                }

                // Last client to call setMode() is always at top of client list
                // as required by SetModeDeathHandler.binderDied()
                mSetModeDeathHandlers.add(0, hdlr);
                hdlr.setMode(mode);
            }

            if (mode != mMode) {
                status = AudioSystem.setPhoneState(mode);
                if (status == AudioSystem.AUDIO_STATUS_OK) {
                    if (DEBUG_MODE) { Log.v(TAG, " mode successfully set to " + mode); }
                    mMode = mode;
                } else {
                    // Native rejected the mode: unwind this handler and retry
                    // with MODE_NORMAL so the next stack entry gets a chance.
                    if (hdlr != null) {
                        mSetModeDeathHandlers.remove(hdlr);
                        cb.unlinkToDeath(hdlr, 0);
                    }
                    // force reading new top of mSetModeDeathHandlers stack
                    if (DEBUG_MODE) { Log.w(TAG, " mode set to MODE_NORMAL after phoneState pb"); }
                    mode = AudioSystem.MODE_NORMAL;
                }
            } else {
                status = AudioSystem.AUDIO_STATUS_OK;
            }
        } while (status != AudioSystem.AUDIO_STATUS_OK && !mSetModeDeathHandlers.isEmpty());

        if (status == AudioSystem.AUDIO_STATUS_OK) {
            if (mode != AudioSystem.MODE_NORMAL) {
                if (mSetModeDeathHandlers.isEmpty()) {
                    Log.e(TAG, "setMode() different from MODE_NORMAL with empty mode client stack");
                } else {
                    newModeOwnerPid = mSetModeDeathHandlers.get(0).getPid();
                }
            }
            // Re-apply the active stream's volume and refresh stream aliases
            // now that the mode change succeeded.
            int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE);
            int device = getDeviceForStream(streamType);
            int index = mStreamStates[mStreamVolumeAlias[streamType]].getIndex(device);
            setStreamVolumeInt(mStreamVolumeAlias[streamType], index, device, true);

            updateStreamVolumeAlias(true /*updateVolumes*/);
        }
        return newModeOwnerPid;
    }
-
- /** @see AudioManager#getMode() */
- public int getMode() {
- return mMode;
- }
-
- //==========================================================================================
- // Sound Effects
- //==========================================================================================
-
    // XML vocabulary of the audio_assets resource that maps touch-sound effect
    // IDs to asset file names (see loadTouchSoundAssets()).
    private static final String TAG_AUDIO_ASSETS = "audio_assets";
    private static final String ATTR_VERSION = "version";
    private static final String TAG_GROUP = "group";
    private static final String ATTR_GROUP_NAME = "name";
    private static final String TAG_ASSET = "asset";
    private static final String ATTR_ASSET_ID = "id";
    private static final String ATTR_ASSET_FILE = "file";

    // Only version 1.0 asset files and the "touch_sounds" group are parsed.
    private static final String ASSET_FILE_VERSION = "1.0";
    private static final String GROUP_TOUCH_SOUNDS = "touch_sounds";

    // Per-attempt wait for the sound pool load acknowledgment.
    private static final int SOUND_EFFECTS_LOAD_TIMEOUT_MS = 5000;

    // Mailbox handing the sound-effect load status back across threads;
    // mStatus == 1 means "still pending".
    class LoadSoundEffectReply {
        public int mStatus = 1;
    };
-
- private void loadTouchSoundAssetDefaults() {
- SOUND_EFFECT_FILES.add("Effect_Tick.ogg");
- for (int i = 0; i < AudioManager.NUM_SOUND_EFFECTS; i++) {
- SOUND_EFFECT_FILES_MAP[i][0] = 0;
- SOUND_EFFECT_FILES_MAP[i][1] = -1;
- }
- }
-
    /**
     * Parses the audio_assets XML resource and fills SOUND_EFFECT_FILES /
     * SOUND_EFFECT_FILES_MAP for the touch_sounds group. Runs at most once;
     * defaults are installed first so a parse failure still leaves usable maps.
     */
    private void loadTouchSoundAssets() {
        XmlResourceParser parser = null;

        // only load assets once.
        if (!SOUND_EFFECT_FILES.isEmpty()) {
            return;
        }

        loadTouchSoundAssetDefaults();

        try {
            parser = mContext.getResources().getXml(com.android.internal.R.xml.audio_assets);

            XmlUtils.beginDocument(parser, TAG_AUDIO_ASSETS);
            String version = parser.getAttributeValue(null, ATTR_VERSION);
            boolean inTouchSoundsGroup = false;

            if (ASSET_FILE_VERSION.equals(version)) {
                // Skip forward to the touch_sounds group.
                while (true) {
                    XmlUtils.nextElement(parser);
                    String element = parser.getName();
                    if (element == null) {
                        break;
                    }
                    if (element.equals(TAG_GROUP)) {
                        String name = parser.getAttributeValue(null, ATTR_GROUP_NAME);
                        if (GROUP_TOUCH_SOUNDS.equals(name)) {
                            inTouchSoundsGroup = true;
                            break;
                        }
                    }
                }
                // Consume consecutive <asset> elements within the group.
                while (inTouchSoundsGroup) {
                    XmlUtils.nextElement(parser);
                    String element = parser.getName();
                    if (element == null) {
                        break;
                    }
                    if (element.equals(TAG_ASSET)) {
                        String id = parser.getAttributeValue(null, ATTR_ASSET_ID);
                        String file = parser.getAttributeValue(null, ATTR_ASSET_FILE);
                        int fx;

                        try {
                            // The asset id is resolved as a public static int
                            // field on AudioManager via reflection.
                            Field field = AudioManager.class.getField(id);
                            fx = field.getInt(null);
                        } catch (Exception e) {
                            Log.w(TAG, "Invalid touch sound ID: "+id);
                            continue;
                        }

                        // De-duplicate file names; the map stores an index into
                        // SOUND_EFFECT_FILES.
                        int i = SOUND_EFFECT_FILES.indexOf(file);
                        if (i == -1) {
                            i = SOUND_EFFECT_FILES.size();
                            SOUND_EFFECT_FILES.add(file);
                        }
                        SOUND_EFFECT_FILES_MAP[fx][0] = i;
                    } else {
                        break;
                    }
                }
            }
        } catch (Resources.NotFoundException e) {
            Log.w(TAG, "audio assets file not found", e);
        } catch (XmlPullParserException e) {
            Log.w(TAG, "XML parser exception reading touch sound assets", e);
        } catch (IOException e) {
            Log.w(TAG, "I/O exception reading touch sound assets", e);
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }
-
- /** @see AudioManager#playSoundEffect(int) */
- public void playSoundEffect(int effectType) {
- playSoundEffectVolume(effectType, -1.0f);
- }
-
- /** @see AudioManager#playSoundEffect(int, float) */
- public void playSoundEffectVolume(int effectType, float volume) {
- if (effectType >= AudioManager.NUM_SOUND_EFFECTS || effectType < 0) {
- Log.w(TAG, "AudioService effectType value " + effectType + " out of range");
- return;
- }
-
- sendMsg(mAudioHandler, MSG_PLAY_SOUND_EFFECT, SENDMSG_QUEUE,
- effectType, (int) (volume * 1000), null, 0);
- }
-
    /**
     * Loads samples into the soundpool.
     * This method must be called at first when sound effects are enabled
     */
    public boolean loadSoundEffects() {
        int attempts = 3;
        LoadSoundEffectReply reply = new LoadSoundEffectReply();

        synchronized (reply) {
            sendMsg(mAudioHandler, MSG_LOAD_SOUND_EFFECTS, SENDMSG_QUEUE, 0, 0, reply, 0);
            // mStatus stays 1 until the handler thread reports completion; the
            // bounded attempts loop also guards against spurious wakeups.
            while ((reply.mStatus == 1) && (attempts-- > 0)) {
                try {
                    reply.wait(SOUND_EFFECTS_LOAD_TIMEOUT_MS);
                } catch (InterruptedException e) {
                    // NOTE(review): interrupt is swallowed without re-interrupting
                    // the thread; the bounded loop limits the total wait.
                    Log.w(TAG, "loadSoundEffects Interrupted while waiting sound pool loaded.");
                }
            }
        }
        // 0 means success; any other value is a load failure or timeout.
        return (reply.mStatus == 0);
    }
-
- /**
- * Unloads samples from the sound pool.
- * This method can be called to free some memory when
- * sound effects are disabled.
- */
- public void unloadSoundEffects() {
- sendMsg(mAudioHandler, MSG_UNLOAD_SOUND_EFFECTS, SENDMSG_QUEUE, 0, 0, null, 0);
- }
-
    /**
     * Dedicated looper thread receiving SoundPool load callbacks. Installs
     * the callback under mSoundEffectsLock and notifies the lock so the
     * starter thread knows the looper is ready.
     */
    class SoundPoolListenerThread extends Thread {
        public SoundPoolListenerThread() {
            super("SoundPoolListenerThread");
        }

        @Override
        public void run() {

            Looper.prepare();
            mSoundPoolLooper = Looper.myLooper();

            synchronized (mSoundEffectsLock) {
                if (mSoundPool != null) {
                    mSoundPoolCallBack = new SoundPoolCallback();
                    mSoundPool.setOnLoadCompleteListener(mSoundPoolCallBack);
                }
                // Wake whoever is waiting for this looper/callback to be ready.
                mSoundEffectsLock.notify();
            }
            Looper.loop();
        }
    }
-
    /**
     * Collects SoundPool load-complete events and notifies mSoundEffectsLock
     * once every expected sample has loaded or any load reported an error.
     */
    private final class SoundPoolCallback implements
            android.media.SoundPool.OnLoadCompleteListener {

        int mStatus = 1; // 1 means neither error nor last sample loaded yet
        List<Integer> mSamples = new ArrayList<Integer>();

        public int status() {
            return mStatus;
        }

        // Records which sample ids to wait for; presumably called while
        // mSoundEffectsLock is held - TODO confirm at call sites.
        public void setSamples(int[] samples) {
            for (int i = 0; i < samples.length; i++) {
                // do not wait ack for samples rejected upfront by SoundPool
                if (samples[i] > 0) {
                    mSamples.add(samples[i]);
                }
            }
        }

        public void onLoadComplete(SoundPool soundPool, int sampleId, int status) {
            synchronized (mSoundEffectsLock) {
                int i = mSamples.indexOf(sampleId);
                if (i >= 0) {
                    // remove(int) by position, deliberately not remove(Object).
                    mSamples.remove(i);
                }
                if ((status != 0) || mSamples. isEmpty()) {
                    mStatus = status;
                    mSoundEffectsLock.notify();
                }
            }
        }
    }
-
- /** @see AudioManager#reloadAudioSettings() */
- public void reloadAudioSettings() {
- readAudioSettings(false /*userSwitch*/);
- }
-
    /**
     * Re-reads persisted audio settings (ringer mode, vibrate settings,
     * per-stream volumes) and re-applies them. On a user switch, music-aliased
     * stream volumes are intentionally left untouched.
     */
    private void readAudioSettings(boolean userSwitch) {
        // restore ringer mode, ringer mode affected streams, mute affected streams and vibrate settings
        readPersistedSettings();

        // restore volume settings
        int numStreamTypes = AudioSystem.getNumStreamTypes();
        for (int streamType = 0; streamType < numStreamTypes; streamType++) {
            VolumeStreamState streamState = mStreamStates[streamType];

            if (userSwitch && mStreamVolumeAlias[streamType] == AudioSystem.STREAM_MUSIC) {
                continue;
            }

            streamState.readSettings();
            synchronized (VolumeStreamState.class) {
                // unmute stream that was muted but is not affect by mute anymore
                if (streamState.isMuted_syncVSS() && ((!isStreamAffectedByMute(streamType) &&
                        !isStreamMutedByRingerMode(streamType)) || mUseFixedVolume)) {
                    int size = streamState.mDeathHandlers.size();
                    for (int i = 0; i < size; i++) {
                        // Force each handler's count to 1 so this unmute call
                        // brings it down to zero.
                        streamState.mDeathHandlers.get(i).mMuteCount = 1;
                        streamState.mDeathHandlers.get(i).mute_syncVSS(false);
                    }
                }
            }
        }

        // apply new ringer mode before checking volume for alias streams so that streams
        // muted by ringer mode have the correct volume
        setRingerModeInt(getRingerModeInternal(), false);

        checkAllFixedVolumeDevices();
        checkAllAliasStreamVolumes();

        synchronized (mSafeMediaVolumeState) {
            // Restore accumulated unsafe-volume music time, clamped to the max.
            mMusicActiveMs = MathUtils.constrain(Settings.Secure.getIntForUser(mContentResolver,
                    Settings.Secure.UNSAFE_VOLUME_MUSIC_ACTIVE_MS, 0, UserHandle.USER_CURRENT),
                    0, UNSAFE_VOLUME_MUSIC_ACTIVE_MS_MAX);
            if (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_ACTIVE) {
                enforceSafeMediaVolume();
            }
        }
    }
-
    /** @see AudioManager#setSpeakerphoneOn(boolean) */
    public void setSpeakerphoneOn(boolean on){
        if (!checkAudioSettingsPermission("setSpeakerphoneOn()")) {
            return;
        }

        if (on) {
            // Turning the speaker on while SCO was forced for communication:
            // also release the forced-record routing that SCO set up.
            if (mForcedUseForComm == AudioSystem.FORCE_BT_SCO) {
                sendMsg(mAudioHandler, MSG_SET_FORCE_USE, SENDMSG_QUEUE,
                        AudioSystem.FOR_RECORD, AudioSystem.FORCE_NONE, null, 0);
            }
            mForcedUseForComm = AudioSystem.FORCE_SPEAKER;
        } else if (mForcedUseForComm == AudioSystem.FORCE_SPEAKER){
            // Only clear the forced use if the speaker was what forced it.
            mForcedUseForComm = AudioSystem.FORCE_NONE;
        }

        sendMsg(mAudioHandler, MSG_SET_FORCE_USE, SENDMSG_QUEUE,
                AudioSystem.FOR_COMMUNICATION, mForcedUseForComm, null, 0);
    }
-
- /** @see AudioManager#isSpeakerphoneOn() */
- public boolean isSpeakerphoneOn() {
- return (mForcedUseForComm == AudioSystem.FORCE_SPEAKER);
- }
-
    /** @see AudioManager#setBluetoothScoOn(boolean) */
    public void setBluetoothScoOn(boolean on){
        if (!checkAudioSettingsPermission("setBluetoothScoOn()")) {
            return;
        }

        if (on) {
            mForcedUseForComm = AudioSystem.FORCE_BT_SCO;
        } else if (mForcedUseForComm == AudioSystem.FORCE_BT_SCO) {
            // Only clear the forced use if SCO was what forced it.
            mForcedUseForComm = AudioSystem.FORCE_NONE;
        }

        // SCO affects both communication playback routing and capture routing.
        sendMsg(mAudioHandler, MSG_SET_FORCE_USE, SENDMSG_QUEUE,
                AudioSystem.FOR_COMMUNICATION, mForcedUseForComm, null, 0);
        sendMsg(mAudioHandler, MSG_SET_FORCE_USE, SENDMSG_QUEUE,
                AudioSystem.FOR_RECORD, mForcedUseForComm, null, 0);
    }
-
- /** @see AudioManager#isBluetoothScoOn() */
- public boolean isBluetoothScoOn() {
- return (mForcedUseForComm == AudioSystem.FORCE_BT_SCO);
- }
-
- /** @see AudioManager#setBluetoothA2dpOn(boolean) */
- public void setBluetoothA2dpOn(boolean on) {
- synchronized (mBluetoothA2dpEnabledLock) {
- mBluetoothA2dpEnabled = on;
- sendMsg(mAudioHandler, MSG_SET_FORCE_BT_A2DP_USE, SENDMSG_QUEUE,
- AudioSystem.FOR_MEDIA,
- mBluetoothA2dpEnabled ? AudioSystem.FORCE_NONE : AudioSystem.FORCE_NO_BT_A2DP,
- null, 0);
- }
- }
-
- /** @see AudioManager#isBluetoothA2dpOn() */
- public boolean isBluetoothA2dpOn() {
- synchronized (mBluetoothA2dpEnabledLock) {
- return mBluetoothA2dpEnabled;
- }
- }
-
- /** @see AudioManager#startBluetoothSco() */
- public void startBluetoothSco(IBinder cb, int targetSdkVersion) {
- int scoAudioMode =
- (targetSdkVersion < Build.VERSION_CODES.JELLY_BEAN_MR2) ?
- SCO_MODE_VIRTUAL_CALL : SCO_MODE_UNDEFINED;
- startBluetoothScoInt(cb, scoAudioMode);
- }
-
- /** @see AudioManager#startBluetoothScoVirtualCall() */
- public void startBluetoothScoVirtualCall(IBinder cb) {
- startBluetoothScoInt(cb, SCO_MODE_VIRTUAL_CALL);
- }
-
    /**
     * Common entry point for starting SCO on behalf of binder {@code cb} with
     * the requested SCO mode. Silently ignored before system ready or without
     * the audio settings permission.
     */
    void startBluetoothScoInt(IBinder cb, int scoAudioMode){
        if (!checkAudioSettingsPermission("startBluetoothSco()") ||
                !mSystemReady) {
            return;
        }
        ScoClient client = getScoClient(cb, true);
        // The calling identity must be cleared before calling ScoClient.incCount().
        // inCount() calls requestScoState() which in turn can call BluetoothHeadset APIs
        // and this must be done on behalf of system server to make sure permissions are granted.
        // The caller identity must be cleared after getScoClient() because it is needed if a new
        // client is created.
        final long ident = Binder.clearCallingIdentity();
        client.incCount(scoAudioMode);
        Binder.restoreCallingIdentity(ident);
    }
-
    /** @see AudioManager#stopBluetoothSco() */
    public void stopBluetoothSco(IBinder cb){
        if (!checkAudioSettingsPermission("stopBluetoothSco()") ||
                !mSystemReady) {
            return;
        }
        // Lookup only - do not create a client for an unknown binder.
        ScoClient client = getScoClient(cb, false);
        // The calling identity must be cleared before calling ScoClient.decCount().
        // decCount() calls requestScoState() which in turn can call BluetoothHeadset APIs
        // and this must be done on behalf of system server to make sure permissions are granted.
        final long ident = Binder.clearCallingIdentity();
        if (client != null) {
            client.decCount();
        }
        Binder.restoreCallingIdentity(ident);
    }
-
-
- private class ScoClient implements IBinder.DeathRecipient {
- private IBinder mCb; // To be notified of client's death
- private int mCreatorPid;
- private int mStartcount; // number of SCO connections started by this client
-
- ScoClient(IBinder cb) {
- mCb = cb;
- mCreatorPid = Binder.getCallingPid();
- mStartcount = 0;
- }
-
- public void binderDied() {
- synchronized(mScoClients) {
- Log.w(TAG, "SCO client died");
- int index = mScoClients.indexOf(this);
- if (index < 0) {
- Log.w(TAG, "unregistered SCO client died");
- } else {
- clearCount(true);
- mScoClients.remove(this);
- }
- }
- }
-
- public void incCount(int scoAudioMode) {
- synchronized(mScoClients) {
- requestScoState(BluetoothHeadset.STATE_AUDIO_CONNECTED, scoAudioMode);
- if (mStartcount == 0) {
- try {
- mCb.linkToDeath(this, 0);
- } catch (RemoteException e) {
- // client has already died!
- Log.w(TAG, "ScoClient incCount() could not link to "+mCb+" binder death");
- }
- }
- mStartcount++;
- }
- }
-
- public void decCount() {
- synchronized(mScoClients) {
- if (mStartcount == 0) {
- Log.w(TAG, "ScoClient.decCount() already 0");
- } else {
- mStartcount--;
- if (mStartcount == 0) {
- try {
- mCb.unlinkToDeath(this, 0);
- } catch (NoSuchElementException e) {
- Log.w(TAG, "decCount() going to 0 but not registered to binder");
- }
- }
- requestScoState(BluetoothHeadset.STATE_AUDIO_DISCONNECTED, 0);
- }
- }
- }
-
- public void clearCount(boolean stopSco) {
- synchronized(mScoClients) {
- if (mStartcount != 0) {
- try {
- mCb.unlinkToDeath(this, 0);
- } catch (NoSuchElementException e) {
- Log.w(TAG, "clearCount() mStartcount: "+mStartcount+" != 0 but not registered to binder");
- }
- }
- mStartcount = 0;
- if (stopSco) {
- requestScoState(BluetoothHeadset.STATE_AUDIO_DISCONNECTED, 0);
- }
- }
- }
-
- public int getCount() {
- return mStartcount;
- }
-
- public IBinder getBinder() {
- return mCb;
- }
-
- public int getPid() {
- return mCreatorPid;
- }
-
- public int totalCount() {
- synchronized(mScoClients) {
- int count = 0;
- int size = mScoClients.size();
- for (int i = 0; i < size; i++) {
- count += mScoClients.get(i).getCount();
- }
- return count;
- }
- }
-
- private void requestScoState(int state, int scoAudioMode) {
- checkScoAudioState();
- if (totalCount() == 0) {
- if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
- // Make sure that the state transitions to CONNECTING even if we cannot initiate
- // the connection.
- broadcastScoConnectionState(AudioManager.SCO_AUDIO_STATE_CONNECTING);
- // Accept SCO audio activation only in NORMAL audio mode or if the mode is
- // currently controlled by the same client process.
- synchronized(mSetModeDeathHandlers) {
- if ((mSetModeDeathHandlers.isEmpty() ||
- mSetModeDeathHandlers.get(0).getPid() == mCreatorPid) &&
- (mScoAudioState == SCO_STATE_INACTIVE ||
- mScoAudioState == SCO_STATE_DEACTIVATE_REQ)) {
- if (mScoAudioState == SCO_STATE_INACTIVE) {
- mScoAudioMode = scoAudioMode;
- if (scoAudioMode == SCO_MODE_UNDEFINED) {
- if (mBluetoothHeadsetDevice != null) {
- mScoAudioMode = new Integer(Settings.Global.getInt(
- mContentResolver,
- "bluetooth_sco_channel_"+
- mBluetoothHeadsetDevice.getAddress(),
- SCO_MODE_VIRTUAL_CALL));
- if (mScoAudioMode > SCO_MODE_MAX || mScoAudioMode < 0) {
- mScoAudioMode = SCO_MODE_VIRTUAL_CALL;
- }
- } else {
- mScoAudioMode = SCO_MODE_RAW;
- }
- }
- if (mBluetoothHeadset != null && mBluetoothHeadsetDevice != null) {
- boolean status = false;
- if (mScoAudioMode == SCO_MODE_RAW) {
- status = mBluetoothHeadset.connectAudio();
- } else if (mScoAudioMode == SCO_MODE_VIRTUAL_CALL) {
- status = mBluetoothHeadset.startScoUsingVirtualVoiceCall(
- mBluetoothHeadsetDevice);
- } else if (mScoAudioMode == SCO_MODE_VR) {
- status = mBluetoothHeadset.startVoiceRecognition(
- mBluetoothHeadsetDevice);
- }
-
- if (status) {
- mScoAudioState = SCO_STATE_ACTIVE_INTERNAL;
- } else {
- broadcastScoConnectionState(
- AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
- }
- } else if (getBluetoothHeadset()) {
- mScoAudioState = SCO_STATE_ACTIVATE_REQ;
- }
- } else {
- mScoAudioState = SCO_STATE_ACTIVE_INTERNAL;
- broadcastScoConnectionState(AudioManager.SCO_AUDIO_STATE_CONNECTED);
- }
- } else {
- broadcastScoConnectionState(AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
- }
- }
- } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED &&
- (mScoAudioState == SCO_STATE_ACTIVE_INTERNAL ||
- mScoAudioState == SCO_STATE_ACTIVATE_REQ)) {
- if (mScoAudioState == SCO_STATE_ACTIVE_INTERNAL) {
- if (mBluetoothHeadset != null && mBluetoothHeadsetDevice != null) {
- boolean status = false;
- if (mScoAudioMode == SCO_MODE_RAW) {
- status = mBluetoothHeadset.disconnectAudio();
- } else if (mScoAudioMode == SCO_MODE_VIRTUAL_CALL) {
- status = mBluetoothHeadset.stopScoUsingVirtualVoiceCall(
- mBluetoothHeadsetDevice);
- } else if (mScoAudioMode == SCO_MODE_VR) {
- status = mBluetoothHeadset.stopVoiceRecognition(
- mBluetoothHeadsetDevice);
- }
-
- if (!status) {
- mScoAudioState = SCO_STATE_INACTIVE;
- broadcastScoConnectionState(
- AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
- }
- } else if (getBluetoothHeadset()) {
- mScoAudioState = SCO_STATE_DEACTIVATE_REQ;
- }
- } else {
- mScoAudioState = SCO_STATE_INACTIVE;
- broadcastScoConnectionState(AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
- }
- }
- }
- }
- }
-
    /**
     * Detects a SCO audio connection that was established by an external party
     * (not through this service). If the headset reports an active audio link
     * while our state machine believes SCO is inactive, mark the state as
     * externally activated so we do not fight over the connection.
     */
    private void checkScoAudioState() {
        if (mBluetoothHeadset != null && mBluetoothHeadsetDevice != null &&
                mScoAudioState == SCO_STATE_INACTIVE &&
                mBluetoothHeadset.getAudioState(mBluetoothHeadsetDevice)
                != BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
            mScoAudioState = SCO_STATE_ACTIVE_EXTERNAL;
        }
    }
-
- private ScoClient getScoClient(IBinder cb, boolean create) {
- synchronized(mScoClients) {
- ScoClient client = null;
- int size = mScoClients.size();
- for (int i = 0; i < size; i++) {
- client = mScoClients.get(i);
- if (client.getBinder() == cb)
- return client;
- }
- if (create) {
- client = new ScoClient(cb);
- mScoClients.add(client);
- }
- return client;
- }
- }
-
- public void clearAllScoClients(int exceptPid, boolean stopSco) {
- synchronized(mScoClients) {
- ScoClient savedClient = null;
- int size = mScoClients.size();
- for (int i = 0; i < size; i++) {
- ScoClient cl = mScoClients.get(i);
- if (cl.getPid() != exceptPid) {
- cl.clearCount(stopSco);
- } else {
- savedClient = cl;
- }
- }
- mScoClients.clear();
- if (savedClient != null) {
- mScoClients.add(savedClient);
- }
- }
- }
-
    /**
     * Asynchronously requests the Bluetooth HEADSET profile proxy; the result
     * is delivered to mBluetoothProfileServiceListener.
     *
     * @return true if the proxy request was successfully issued
     */
    private boolean getBluetoothHeadset() {
        boolean result = false;
        BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
        if (adapter != null) {
            result = adapter.getProfileProxy(mContext, mBluetoothProfileServiceListener,
                    BluetoothProfile.HEADSET);
        }
        // If we could not get a bluetooth headset proxy, send a failure message
        // without delay to reset the SCO audio state and clear SCO clients.
        // If we could get a proxy, send a delayed failure message that will reset our state
        // in case we don't receive onServiceConnected().
        sendMsg(mAudioHandler, MSG_BT_HEADSET_CNCT_FAILED,
                SENDMSG_REPLACE, 0, 0, null, result ? BT_HEADSET_CNCT_TIMEOUT_MS : 0);
        return result;
    }
-
    /**
     * Tears down SCO audio. If the connection was initiated externally
     * (voice-recognition style), stop it directly on the headset; otherwise
     * clear the internal SCO clients (except the caller's pid) which stops SCO.
     *
     * @param exceptPid pid whose ScoClient must survive the cleanup
     */
    private void disconnectBluetoothSco(int exceptPid) {
        synchronized(mScoClients) {
            // refresh state first: SCO may have been activated externally
            checkScoAudioState();
            if (mScoAudioState == SCO_STATE_ACTIVE_EXTERNAL ||
                    mScoAudioState == SCO_STATE_DEACTIVATE_EXT_REQ) {
                if (mBluetoothHeadsetDevice != null) {
                    if (mBluetoothHeadset != null) {
                        // failure is signaled via the headset-connect-failed message
                        if (!mBluetoothHeadset.stopVoiceRecognition(
                                mBluetoothHeadsetDevice)) {
                            sendMsg(mAudioHandler, MSG_BT_HEADSET_CNCT_FAILED,
                                    SENDMSG_REPLACE, 0, 0, null, 0);
                        }
                    } else if (mScoAudioState == SCO_STATE_ACTIVE_EXTERNAL &&
                            getBluetoothHeadset()) {
                        // no proxy yet: request one and finish in onServiceConnected()
                        mScoAudioState = SCO_STATE_DEACTIVATE_EXT_REQ;
                    }
                }
            } else {
                clearAllScoClients(exceptPid, true);
            }
        }
    }
-
    /**
     * Resets all SCO state: drops every client without stopping SCO explicitly,
     * marks the state machine inactive and broadcasts the disconnection.
     */
    private void resetBluetoothSco() {
        synchronized(mScoClients) {
            clearAllScoClients(0, false);
            mScoAudioState = SCO_STATE_INACTIVE;
            broadcastScoConnectionState(AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
        }
    }
-
    /**
     * Queues a SCO connection-state broadcast on the audio handler thread;
     * the actual intent is sent by onBroadcastScoConnectionState().
     */
    private void broadcastScoConnectionState(int state) {
        sendMsg(mAudioHandler, MSG_BROADCAST_BT_CONNECTION_STATE,
                SENDMSG_QUEUE, state, 0, null, 0);
    }
-
- private void onBroadcastScoConnectionState(int state) {
- if (state != mScoConnectionState) {
- Intent newIntent = new Intent(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
- newIntent.putExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, state);
- newIntent.putExtra(AudioManager.EXTRA_SCO_AUDIO_PREVIOUS_STATE,
- mScoConnectionState);
- sendStickyBroadcastToAll(newIntent);
- mScoConnectionState = state;
- }
- }
-
    /**
     * Receives Bluetooth profile proxies (A2DP, A2DP_SINK, HEADSET) and
     * processes connection state for the first connected device of each
     * profile. For HEADSET it also completes any SCO (de)activation that was
     * pending while the proxy was unavailable.
     */
    private BluetoothProfile.ServiceListener mBluetoothProfileServiceListener =
        new BluetoothProfile.ServiceListener() {
        public void onServiceConnected(int profile, BluetoothProfile proxy) {
            BluetoothDevice btDevice;
            List<BluetoothDevice> deviceList;
            switch(profile) {
            case BluetoothProfile.A2DP:
                synchronized (mConnectedDevices) {
                    synchronized (mA2dpAvrcpLock) {
                        mA2dp = (BluetoothA2dp) proxy;
                        deviceList = mA2dp.getConnectedDevices();
                        if (deviceList.size() > 0) {
                            // only the first connected device is handled
                            btDevice = deviceList.get(0);
                            int state = mA2dp.getConnectionState(btDevice);
                            // delay the state change if a "becoming noisy" intent must go out first
                            int delay = checkSendBecomingNoisyIntent(
                                    AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
                                    (state == BluetoothA2dp.STATE_CONNECTED) ? 1 : 0);
                            queueMsgUnderWakeLock(mAudioHandler,
                                    MSG_SET_A2DP_SINK_CONNECTION_STATE,
                                    state,
                                    0,
                                    btDevice,
                                    delay);
                        }
                    }
                }
                break;

            case BluetoothProfile.A2DP_SINK:
                deviceList = proxy.getConnectedDevices();
                if (deviceList.size() > 0) {
                    btDevice = deviceList.get(0);
                    synchronized (mConnectedDevices) {
                        int state = proxy.getConnectionState(btDevice);
                        queueMsgUnderWakeLock(mAudioHandler,
                                MSG_SET_A2DP_SRC_CONNECTION_STATE,
                                state,
                                0,
                                btDevice,
                                0 /* delay */);
                    }
                }
                break;

            case BluetoothProfile.HEADSET:
                synchronized (mScoClients) {
                    // Discard timeout message
                    mAudioHandler.removeMessages(MSG_BT_HEADSET_CNCT_FAILED);
                    mBluetoothHeadset = (BluetoothHeadset) proxy;
                    deviceList = mBluetoothHeadset.getConnectedDevices();
                    if (deviceList.size() > 0) {
                        mBluetoothHeadsetDevice = deviceList.get(0);
                    } else {
                        mBluetoothHeadsetDevice = null;
                    }
                    // Refresh SCO audio state
                    checkScoAudioState();
                    // Continue pending action if any
                    if (mScoAudioState == SCO_STATE_ACTIVATE_REQ ||
                            mScoAudioState == SCO_STATE_DEACTIVATE_REQ ||
                            mScoAudioState == SCO_STATE_DEACTIVATE_EXT_REQ) {
                        boolean status = false;
                        if (mBluetoothHeadsetDevice != null) {
                            switch (mScoAudioState) {
                            case SCO_STATE_ACTIVATE_REQ:
                                mScoAudioState = SCO_STATE_ACTIVE_INTERNAL;
                                // SCO mode selects which headset API pair is used
                                if (mScoAudioMode == SCO_MODE_RAW) {
                                    status = mBluetoothHeadset.connectAudio();
                                } else if (mScoAudioMode == SCO_MODE_VIRTUAL_CALL) {
                                    status = mBluetoothHeadset.startScoUsingVirtualVoiceCall(
                                            mBluetoothHeadsetDevice);
                                } else if (mScoAudioMode == SCO_MODE_VR) {
                                    status = mBluetoothHeadset.startVoiceRecognition(
                                            mBluetoothHeadsetDevice);
                                }
                                break;
                            case SCO_STATE_DEACTIVATE_REQ:
                                if (mScoAudioMode == SCO_MODE_RAW) {
                                    status = mBluetoothHeadset.disconnectAudio();
                                } else if (mScoAudioMode == SCO_MODE_VIRTUAL_CALL) {
                                    status = mBluetoothHeadset.stopScoUsingVirtualVoiceCall(
                                            mBluetoothHeadsetDevice);
                                } else if (mScoAudioMode == SCO_MODE_VR) {
                                    status = mBluetoothHeadset.stopVoiceRecognition(
                                            mBluetoothHeadsetDevice);
                                }
                                break;
                            case SCO_STATE_DEACTIVATE_EXT_REQ:
                                // externally-initiated SCO is always VR-style
                                status = mBluetoothHeadset.stopVoiceRecognition(
                                        mBluetoothHeadsetDevice);
                            }
                        }
                        if (!status) {
                            // no device or the headset call failed: reset SCO state now
                            sendMsg(mAudioHandler, MSG_BT_HEADSET_CNCT_FAILED,
                                    SENDMSG_REPLACE, 0, 0, null, 0);
                        }
                    }
                }
                break;

            default:
                break;
            }
        }
        public void onServiceDisconnected(int profile) {
            switch(profile) {
            case BluetoothProfile.A2DP:
                synchronized (mConnectedDevices) {
                    synchronized (mA2dpAvrcpLock) {
                        mA2dp = null;
                        if (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)) {
                            makeA2dpDeviceUnavailableNow(
                                    mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP));
                        }
                    }
                }
                break;

            case BluetoothProfile.A2DP_SINK:
                synchronized (mConnectedDevices) {
                    if (mConnectedDevices.containsKey(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP)) {
                        makeA2dpSrcUnavailable(
                                mConnectedDevices.get(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP));
                    }
                }
                break;

            case BluetoothProfile.HEADSET:
                synchronized (mScoClients) {
                    mBluetoothHeadset = null;
                }
                break;

            default:
                break;
            }
        }
    };
-
    /**
     * Periodic poll (while safe-media-volume is inactive) that accumulates the
     * time music has played above the safe index on a headset-class device.
     * Once the cumulative time exceeds the allowed maximum, safe media volume
     * is re-enabled and the counter resets.
     */
    private void onCheckMusicActive() {
        // NOTE(review): this synchronizes on mSafeMediaVolumeState, which looks
        // like an Integer field that may be reassigned elsewhere — confirm the
        // lock object is stable.
        synchronized (mSafeMediaVolumeState) {
            if (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_INACTIVE) {
                int device = getDeviceForStream(AudioSystem.STREAM_MUSIC);

                if ((device & mSafeMediaVolumeDevices) != 0) {
                    // re-arm the poll for the next period
                    sendMsg(mAudioHandler,
                            MSG_CHECK_MUSIC_ACTIVE,
                            SENDMSG_REPLACE,
                            0,
                            0,
                            null,
                            MUSIC_ACTIVE_POLL_PERIOD_MS);
                    int index = mStreamStates[AudioSystem.STREAM_MUSIC].getIndex(device);
                    if (AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC, 0) &&
                            (index > mSafeMediaVolumeIndex)) {
                        // Approximate cumulative active music time
                        mMusicActiveMs += MUSIC_ACTIVE_POLL_PERIOD_MS;
                        if (mMusicActiveMs > UNSAFE_VOLUME_MUSIC_ACTIVE_MS_MAX) {
                            setSafeMediaVolumeEnabled(true);
                            mMusicActiveMs = 0;
                        }
                        saveMusicActiveMs();
                    }
                }
            }
        }
    }
-
    /** Asynchronously persists the accumulated active-music time (ms). */
    private void saveMusicActiveMs() {
        mAudioHandler.obtainMessage(MSG_PERSIST_MUSIC_ACTIVE_MS, mMusicActiveMs, 0).sendToTarget();
    }
-
    /**
     * (Re)configures the safe media volume feature when the MCC (country code)
     * changes, or when forced and no MCC was ever applied. Reads the safe
     * volume index and enable flag from resources/system properties, updates
     * the in-memory state and queues persistence of the new state.
     *
     * @param force apply configuration even if MCC did not change (only when
     *              mMcc is still 0, i.e. never configured)
     */
    private void onConfigureSafeVolume(boolean force) {
        synchronized (mSafeMediaVolumeState) {
            int mcc = mContext.getResources().getConfiguration().mcc;
            if ((mMcc != mcc) || ((mMcc == 0) && force)) {
                // resource value is in UI units; internal indices are x10
                mSafeMediaVolumeIndex = mContext.getResources().getInteger(
                        com.android.internal.R.integer.config_safe_media_volume_index) * 10;
                boolean safeMediaVolumeEnabled =
                        SystemProperties.getBoolean("audio.safemedia.force", false)
                        || mContext.getResources().getBoolean(
                                com.android.internal.R.bool.config_safe_media_volume_enabled);

                // The persisted state is either "disabled" or "active": this is the state applied
                // next time we boot and cannot be "inactive"
                int persistedState;
                if (safeMediaVolumeEnabled) {
                    persistedState = SAFE_MEDIA_VOLUME_ACTIVE;
                    // The state can already be "inactive" here if the user has forced it before
                    // the 30 seconds timeout for forced configuration. In this case we don't reset
                    // it to "active".
                    if (mSafeMediaVolumeState != SAFE_MEDIA_VOLUME_INACTIVE) {
                        if (mMusicActiveMs == 0) {
                            mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_ACTIVE;
                            enforceSafeMediaVolume();
                        } else {
                            // We have existing playback time recorded, already confirmed.
                            mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_INACTIVE;
                        }
                    }
                } else {
                    persistedState = SAFE_MEDIA_VOLUME_DISABLED;
                    mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_DISABLED;
                }
                mMcc = mcc;
                sendMsg(mAudioHandler,
                        MSG_PERSIST_SAFE_VOLUME_STATE,
                        SENDMSG_QUEUE,
                        persistedState,
                        0,
                        null,
                        0);
            }
        }
    }
-
- ///////////////////////////////////////////////////////////////////////////
- // Internal methods
- ///////////////////////////////////////////////////////////////////////////
-
- /**
- * Checks if the adjustment should change ringer mode instead of just
- * adjusting volume. If so, this will set the proper ringer mode and volume
- * indices on the stream states.
- */
- private int checkForRingerModeChange(int oldIndex, int direction, int step) {
- int result = FLAG_ADJUST_VOLUME;
- int ringerMode = getRingerModeInternal();
-
- switch (ringerMode) {
- case RINGER_MODE_NORMAL:
- if (direction == AudioManager.ADJUST_LOWER) {
- if (mHasVibrator) {
- // "step" is the delta in internal index units corresponding to a
- // change of 1 in UI index units.
- // Because of rounding when rescaling from one stream index range to its alias
- // index range, we cannot simply test oldIndex == step:
- // (step <= oldIndex < 2 * step) is equivalent to: (old UI index == 1)
- if (step <= oldIndex && oldIndex < 2 * step) {
- ringerMode = RINGER_MODE_VIBRATE;
- }
- } else {
- // (oldIndex < step) is equivalent to (old UI index == 0)
- if ((oldIndex < step)
- && VOLUME_SETS_RINGER_MODE_SILENT
- && mPrevVolDirection != AudioManager.ADJUST_LOWER) {
- ringerMode = RINGER_MODE_SILENT;
- }
- }
- }
- break;
- case RINGER_MODE_VIBRATE:
- if (!mHasVibrator) {
- Log.e(TAG, "checkForRingerModeChange() current ringer mode is vibrate" +
- "but no vibrator is present");
- break;
- }
- if ((direction == AudioManager.ADJUST_LOWER)) {
- if (mPrevVolDirection != AudioManager.ADJUST_LOWER) {
- if (VOLUME_SETS_RINGER_MODE_SILENT) {
- ringerMode = RINGER_MODE_SILENT;
- } else {
- result |= AudioManager.FLAG_SHOW_VIBRATE_HINT;
- }
- }
- } else if (direction == AudioManager.ADJUST_RAISE) {
- ringerMode = RINGER_MODE_NORMAL;
- }
- result &= ~FLAG_ADJUST_VOLUME;
- break;
- case RINGER_MODE_SILENT:
- if (direction == AudioManager.ADJUST_RAISE) {
- if (PREVENT_VOLUME_ADJUSTMENT_IF_SILENT) {
- result |= AudioManager.FLAG_SHOW_SILENT_HINT;
- } else {
- if (mHasVibrator) {
- ringerMode = RINGER_MODE_VIBRATE;
- } else {
- ringerMode = RINGER_MODE_NORMAL;
- }
- }
- }
- result &= ~FLAG_ADJUST_VOLUME;
- break;
- default:
- Log.e(TAG, "checkForRingerModeChange() wrong ringer mode: "+ringerMode);
- break;
- }
-
- setRingerMode(ringerMode, TAG + ".checkForRingerModeChange", false /*external*/);
-
- mPrevVolDirection = direction;
-
- return result;
- }
-
- @Override
- public boolean isStreamAffectedByRingerMode(int streamType) {
- return (mRingerModeAffectedStreams & (1 << streamType)) != 0;
- }
-
- private boolean isStreamMutedByRingerMode(int streamType) {
- return (mRingerModeMutedStreams & (1 << streamType)) != 0;
- }
-
    /**
     * Recomputes which streams are affected by the ringer mode from settings,
     * platform type, camera-sound policy and DTMF aliasing, persisting the mask
     * when it changed.
     *
     * @return true if the mask changed (and was persisted)
     */
    boolean updateRingerModeAffectedStreams() {
        int ringerModeAffectedStreams;
        // make sure settings for ringer mode are consistent with device type: non voice capable
        // devices (tablets) include media stream in silent mode whereas phones don't.
        ringerModeAffectedStreams = Settings.System.getIntForUser(mContentResolver,
                Settings.System.MODE_RINGER_STREAMS_AFFECTED,
                ((1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_NOTIFICATION)|
                (1 << AudioSystem.STREAM_SYSTEM)|(1 << AudioSystem.STREAM_SYSTEM_ENFORCED)),
                UserHandle.USER_CURRENT);

        // ringtone, notification and system streams are always affected by ringer mode
        ringerModeAffectedStreams |= (1 << AudioSystem.STREAM_RING)|
                                        (1 << AudioSystem.STREAM_NOTIFICATION)|
                                        (1 << AudioSystem.STREAM_SYSTEM);

        switch (mPlatformType) {
            case PLATFORM_TELEVISION:
                // TV has no ringer: nothing is affected
                ringerModeAffectedStreams = 0;
                break;
            default:
                ringerModeAffectedStreams &= ~(1 << AudioSystem.STREAM_MUSIC);
                break;
        }

        synchronized (mCameraSoundForced) {
            // forced camera sound must keep playing even in silent/vibrate
            if (mCameraSoundForced) {
                ringerModeAffectedStreams &= ~(1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
            } else {
                ringerModeAffectedStreams |= (1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
            }
        }
        // DTMF follows ringer mode only while it is aliased to the ring stream
        if (mStreamVolumeAlias[AudioSystem.STREAM_DTMF] == AudioSystem.STREAM_RING) {
            ringerModeAffectedStreams |= (1 << AudioSystem.STREAM_DTMF);
        } else {
            ringerModeAffectedStreams &= ~(1 << AudioSystem.STREAM_DTMF);
        }

        if (ringerModeAffectedStreams != mRingerModeAffectedStreams) {
            Settings.System.putIntForUser(mContentResolver,
                    Settings.System.MODE_RINGER_STREAMS_AFFECTED,
                    ringerModeAffectedStreams,
                    UserHandle.USER_CURRENT);
            mRingerModeAffectedStreams = ringerModeAffectedStreams;
            return true;
        }
        return false;
    }
-
- public boolean isStreamAffectedByMute(int streamType) {
- return (mMuteAffectedStreams & (1 << streamType)) != 0;
- }
-
- private void ensureValidDirection(int direction) {
- if (direction < AudioManager.ADJUST_LOWER || direction > AudioManager.ADJUST_RAISE) {
- throw new IllegalArgumentException("Bad direction " + direction);
- }
- }
-
- private void ensureValidSteps(int steps) {
- if (Math.abs(steps) > MAX_BATCH_VOLUME_ADJUST_STEPS) {
- throw new IllegalArgumentException("Bad volume adjust steps " + steps);
- }
- }
-
- private void ensureValidStreamType(int streamType) {
- if (streamType < 0 || streamType >= mStreamStates.length) {
- throw new IllegalArgumentException("Bad stream type " + streamType);
- }
- }
-
- private boolean isInCommunication() {
- boolean IsInCall = false;
-
- TelecomManager telecomManager =
- (TelecomManager) mContext.getSystemService(Context.TELECOM_SERVICE);
-
- final long ident = Binder.clearCallingIdentity();
- IsInCall = telecomManager.isInCall();
- Binder.restoreCallingIdentity(ident);
-
- return (IsInCall || getMode() == AudioManager.MODE_IN_COMMUNICATION);
- }
-
- /**
- * For code clarity for getActiveStreamType(int)
- * @param delay_ms max time since last STREAM_MUSIC activity to consider
- * @return true if STREAM_MUSIC is active in streams handled by AudioFlinger now or
- * in the last "delay_ms" ms.
- */
- private boolean isAfMusicActiveRecently(int delay_ms) {
- return AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC, delay_ms)
- || AudioSystem.isStreamActiveRemotely(AudioSystem.STREAM_MUSIC, delay_ms);
- }
-
    /**
     * Maps a caller-suggested stream type to the stream that volume keys should
     * actually control, based on platform type, in-call state and recent
     * playback activity. Falls through to the suggestion when no override applies.
     */
    private int getActiveStreamType(int suggestedStreamType) {
        switch (mPlatformType) {
        case PLATFORM_VOICE:
            if (isInCommunication()) {
                // in a call: control the call stream (SCO when forced to BT)
                if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION)
                        == AudioSystem.FORCE_BT_SCO) {
                    // Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO...");
                    return AudioSystem.STREAM_BLUETOOTH_SCO;
                } else {
                    // Log.v(TAG, "getActiveStreamType: Forcing STREAM_VOICE_CALL...");
                    return AudioSystem.STREAM_VOICE_CALL;
                }
            } else if (suggestedStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
                if (isAfMusicActiveRecently(StreamOverride.sDelayMs)) {
                    if (DEBUG_VOL)
                        Log.v(TAG, "getActiveStreamType: Forcing STREAM_MUSIC stream active");
                    return AudioSystem.STREAM_MUSIC;
                } else {
                    if (DEBUG_VOL)
                        Log.v(TAG, "getActiveStreamType: Forcing STREAM_RING b/c default");
                    return AudioSystem.STREAM_RING;
                }
            } else if (isAfMusicActiveRecently(0)) {
                if (DEBUG_VOL)
                    Log.v(TAG, "getActiveStreamType: Forcing STREAM_MUSIC stream active");
                return AudioSystem.STREAM_MUSIC;
            }
            break;
        case PLATFORM_TELEVISION:
            if (suggestedStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
                // TV always defaults to STREAM_MUSIC
                return AudioSystem.STREAM_MUSIC;
            }
            break;
        default:
            if (isInCommunication()) {
                if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION)
                        == AudioSystem.FORCE_BT_SCO) {
                    if (DEBUG_VOL) Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO");
                    return AudioSystem.STREAM_BLUETOOTH_SCO;
                } else {
                    if (DEBUG_VOL) Log.v(TAG, "getActiveStreamType: Forcing STREAM_VOICE_CALL");
                    return AudioSystem.STREAM_VOICE_CALL;
                }
            } else if (AudioSystem.isStreamActive(AudioSystem.STREAM_NOTIFICATION,
                    StreamOverride.sDelayMs) ||
                    AudioSystem.isStreamActive(AudioSystem.STREAM_RING,
                    StreamOverride.sDelayMs)) {
                if (DEBUG_VOL) Log.v(TAG, "getActiveStreamType: Forcing STREAM_NOTIFICATION");
                return AudioSystem.STREAM_NOTIFICATION;
            } else if (suggestedStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
                if (isAfMusicActiveRecently(StreamOverride.sDelayMs)) {
                    if (DEBUG_VOL) Log.v(TAG, "getActiveStreamType: forcing STREAM_MUSIC");
                    return AudioSystem.STREAM_MUSIC;
                } else {
                    if (DEBUG_VOL) Log.v(TAG,
                            "getActiveStreamType: using STREAM_NOTIFICATION as default");
                    return AudioSystem.STREAM_NOTIFICATION;
                }
            }
            break;
        }
        if (DEBUG_VOL) Log.v(TAG, "getActiveStreamType: Returning suggested type "
                + suggestedStreamType);
        return suggestedStreamType;
    }
-
- private void broadcastRingerMode(String action, int ringerMode) {
- // Send sticky broadcast
- Intent broadcast = new Intent(action);
- broadcast.putExtra(AudioManager.EXTRA_RINGER_MODE, ringerMode);
- broadcast.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT
- | Intent.FLAG_RECEIVER_REPLACE_PENDING);
- sendStickyBroadcastToAll(broadcast);
- }
-
    /**
     * Broadcasts the current vibrate setting for the given vibrate type, but
     * only once the system is ready to deliver broadcasts.
     */
    private void broadcastVibrateSetting(int vibrateType) {
        // Send broadcast
        if (ActivityManagerNative.isSystemReady()) {
            Intent broadcast = new Intent(AudioManager.VIBRATE_SETTING_CHANGED_ACTION);
            broadcast.putExtra(AudioManager.EXTRA_VIBRATE_TYPE, vibrateType);
            broadcast.putExtra(AudioManager.EXTRA_VIBRATE_SETTING, getVibrateSetting(vibrateType));
            sendBroadcastToAll(broadcast);
        }
    }
-
- // Message helper methods
    /**
     * Queue a message on the given handler's message queue, after acquiring the service wake lock.
     * Note that the wake lock needs to be released after the message has been handled.
     */
    private void queueMsgUnderWakeLock(Handler handler, int msg,
            int arg1, int arg2, Object obj, int delay) {
        final long ident = Binder.clearCallingIdentity();
        // Always acquire the wake lock as AudioService because it is released by the
        // message handler.
        mAudioEventWakeLock.acquire();
        Binder.restoreCallingIdentity(ident);
        sendMsg(handler, msg, SENDMSG_QUEUE, arg1, arg2, obj, delay);
    }
-
    /**
     * Posts a message honoring the existing-message policy: SENDMSG_REPLACE
     * removes pending duplicates first, SENDMSG_NOOP drops the new message if
     * a duplicate is pending, SENDMSG_QUEUE always enqueues. Device-connection
     * messages also record the send time for later serialization.
     */
    private static void sendMsg(Handler handler, int msg,
            int existingMsgPolicy, int arg1, int arg2, Object obj, int delay) {

        if (existingMsgPolicy == SENDMSG_REPLACE) {
            handler.removeMessages(msg);
        } else if (existingMsgPolicy == SENDMSG_NOOP && handler.hasMessages(msg)) {
            return;
        }
        // NOTE(review): this synchronizes on mLastDeviceConnectMsgTime and then
        // reassigns that same field inside the block — if it is a boxed Long,
        // later callers lock a different object. Confirm the lock's identity.
        synchronized (mLastDeviceConnectMsgTime) {
            long time = SystemClock.uptimeMillis() + delay;
            handler.sendMessageAtTime(handler.obtainMessage(msg, arg1, arg2, obj), time);
            if (msg == MSG_SET_WIRED_DEVICE_CONNECTION_STATE ||
                    msg == MSG_SET_A2DP_SRC_CONNECTION_STATE ||
                    msg == MSG_SET_A2DP_SINK_CONNECTION_STATE) {
                mLastDeviceConnectMsgTime = time;
            }
        }
    }
-
- boolean checkAudioSettingsPermission(String method) {
- if (mContext.checkCallingOrSelfPermission(android.Manifest.permission.MODIFY_AUDIO_SETTINGS)
- == PackageManager.PERMISSION_GRANTED) {
- return true;
- }
- String msg = "Audio Settings Permission Denial: " + method + " from pid="
- + Binder.getCallingPid()
- + ", uid=" + Binder.getCallingUid();
- Log.w(TAG, msg);
- return false;
- }
-
    /**
     * Returns the single output device selected for the stream. When audio
     * policy reports multiple devices, one is picked by a fixed priority:
     * speaker, then HDMI ARC, SPDIF, AUX line, and finally any A2DP device.
     */
    private int getDeviceForStream(int stream) {
        int device = AudioSystem.getDevicesForStream(stream);
        // (device & (device - 1)) != 0 tests for more than one bit set
        if ((device & (device - 1)) != 0) {
            // Multiple device selection is either:
            //  - speaker + one other device: give priority to speaker in this case.
            //  - one A2DP device + another device: happens with duplicated output. In this case
            // retain the device on the A2DP output as the other must not correspond to an active
            // selection if not the speaker.
            //  - HDMI-CEC system audio mode only output: give priority to available item in order.
            if ((device & AudioSystem.DEVICE_OUT_SPEAKER) != 0) {
                device = AudioSystem.DEVICE_OUT_SPEAKER;
            } else if ((device & AudioSystem.DEVICE_OUT_HDMI_ARC) != 0) {
                device = AudioSystem.DEVICE_OUT_HDMI_ARC;
            } else if ((device & AudioSystem.DEVICE_OUT_SPDIF) != 0) {
                device = AudioSystem.DEVICE_OUT_SPDIF;
            } else if ((device & AudioSystem.DEVICE_OUT_AUX_LINE) != 0) {
                device = AudioSystem.DEVICE_OUT_AUX_LINE;
            } else {
                device &= AudioSystem.DEVICE_OUT_ALL_A2DP;
            }
        }
        return device;
    }
-
    /**
     * Queues a wired device (dis)connection, possibly delayed so that a
     * "becoming noisy" intent can be delivered first. Runs under the service
     * wake lock until the message is handled.
     */
    public void setWiredDeviceConnectionState(int device, int state, String name) {
        synchronized (mConnectedDevices) {
            int delay = checkSendBecomingNoisyIntent(device, state);
            queueMsgUnderWakeLock(mAudioHandler,
                    MSG_SET_WIRED_DEVICE_CONNECTION_STATE,
                    device,
                    state,
                    name,
                    delay);
        }
    }
-
    /**
     * Queues an A2DP sink or source (dis)connection for the given device.
     * Sink connections may be delayed to let a "becoming noisy" intent go out.
     *
     * @param profile must be BluetoothProfile.A2DP or A2DP_SINK
     * @return the delay (ms) applied before the state change is processed
     * @throws IllegalArgumentException for any other profile
     */
    public int setBluetoothA2dpDeviceConnectionState(BluetoothDevice device, int state, int profile)
    {
        int delay;
        if (profile != BluetoothProfile.A2DP && profile != BluetoothProfile.A2DP_SINK) {
            throw new IllegalArgumentException("invalid profile " + profile);
        }
        synchronized (mConnectedDevices) {
            if (profile == BluetoothProfile.A2DP) {
                delay = checkSendBecomingNoisyIntent(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
                        (state == BluetoothA2dp.STATE_CONNECTED) ? 1 : 0);
            } else {
                delay = 0;
            }
            queueMsgUnderWakeLock(mAudioHandler,
                    (profile == BluetoothProfile.A2DP ?
                        MSG_SET_A2DP_SINK_CONNECTION_STATE : MSG_SET_A2DP_SRC_CONNECTION_STATE),
                    state,
                    0,
                    device,
                    delay);
        }
        return delay;
    }
-
- ///////////////////////////////////////////////////////////////////////////
- // Inner classes
- ///////////////////////////////////////////////////////////////////////////
-
- // NOTE: Locking order for synchronized objects related to volume or ringer mode management:
- //  1 mScoClients OR mSafeMediaVolumeState
- // 2 mSetModeDeathHandlers
- // 3 mSettingsLock
- // 4 VolumeStreamState.class
- // 5 mCameraSoundForced
- public class VolumeStreamState {
- private final int mStreamType;
-
- private String mVolumeIndexSettingName;
- private int mIndexMax;
- private final ConcurrentHashMap<Integer, Integer> mIndex =
- new ConcurrentHashMap<Integer, Integer>(8, 0.75f, 4);
- private ArrayList<VolumeDeathHandler> mDeathHandlers; //handles mute/solo clients death
-
        /**
         * Creates the per-stream volume state, initializes the stream's volume
         * range in audio policy, and loads persisted per-device indices.
         *
         * @param settingName base Settings.System key for this stream's volume
         * @param streamType AudioSystem stream this state tracks
         */
        private VolumeStreamState(String settingName, int streamType) {

            mVolumeIndexSettingName = settingName;

            mStreamType = streamType;
            mIndexMax = MAX_STREAM_VOLUME[streamType];
            AudioSystem.initStreamVolume(streamType, 0, mIndexMax);
            // internal indices are stored x10 for rescaling precision
            mIndexMax *= 10;

            // mDeathHandlers must be created before calling readSettings()
            mDeathHandlers = new ArrayList<VolumeDeathHandler>();

            readSettings();
        }
-
- public String getSettingNameForDevice(int device) {
- String name = mVolumeIndexSettingName;
- String suffix = AudioSystem.getOutputDeviceName(device);
- if (suffix.isEmpty()) {
- return name;
- }
- return name + "_" + suffix;
- }
-
        /**
         * Loads persisted per-device volume indices for this stream. Fixed/master
         * volume forces max; SYSTEM streams always use defaults (never persisted);
         * otherwise each known output device's stored index is read from settings.
         */
        public void readSettings() {
            synchronized (VolumeStreamState.class) {
                // force maximum volume on all streams if fixed volume property
                // or master volume property is set
                if (mUseFixedVolume || mUseMasterVolume) {
                    mIndex.put(AudioSystem.DEVICE_OUT_DEFAULT, mIndexMax);
                    return;
                }
                // do not read system stream volume from settings: this stream is always aliased
                // to another stream type and its volume is never persisted. Values in settings can
                // only be stale values
                if ((mStreamType == AudioSystem.STREAM_SYSTEM) ||
                        (mStreamType == AudioSystem.STREAM_SYSTEM_ENFORCED)) {
                    int index = 10 * DEFAULT_STREAM_VOLUME[mStreamType];
                    synchronized (mCameraSoundForced) {
                        if (mCameraSoundForced) {
                            index = mIndexMax;
                        }
                    }
                    mIndex.put(AudioSystem.DEVICE_OUT_DEFAULT, index);
                    return;
                }

                int remainingDevices = AudioSystem.DEVICE_OUT_ALL;

                // iterate every device bit in the output-device mask
                for (int i = 0; remainingDevices != 0; i++) {
                    int device = (1 << i);
                    if ((device & remainingDevices) == 0) {
                        continue;
                    }
                    remainingDevices &= ~device;

                    // retrieve current volume for device
                    String name = getSettingNameForDevice(device);
                    // if no volume stored for current stream and device, use default volume if default
                    // device, continue otherwise
                    int defaultIndex = (device == AudioSystem.DEVICE_OUT_DEFAULT) ?
                            DEFAULT_STREAM_VOLUME[mStreamType] : -1;
                    int index = Settings.System.getIntForUser(
                            mContentResolver, name, defaultIndex, UserHandle.USER_CURRENT);
                    if (index == -1) {
                        continue;
                    }

                    // stored values are UI units; internal map keeps x10, clamped
                    mIndex.put(device, getValidIndex(10 * index));
                }
            }
        }
-
        // must be called while synchronized VolumeStreamState.class
        /**
         * Pushes this stream's volume for one device down to audio policy:
         * 0 when muted, max for AVRCP-absolute-volume A2DP or full-volume
         * devices, otherwise the stored index (rescaled x10 -> UI units).
         */
        public void applyDeviceVolume_syncVSS(int device) {
            int index;
            if (isMuted_syncVSS()) {
                index = 0;
            } else if (((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0 && mAvrcpAbsVolSupported)
                    || ((device & mFullVolumeDevices) != 0)) {
                index = (mIndexMax + 5)/10;
            } else {
                index = (getIndex(device) + 5)/10;
            }
            AudioSystem.setStreamVolumeIndex(mStreamType, index, device);
        }
-
- public void applyAllVolumes() {
- synchronized (VolumeStreamState.class) {
- // apply default volume first: by convention this will reset all
- // devices volumes in audio policy manager to the supplied value
- int index;
- if (isMuted_syncVSS()) {
- index = 0;
- } else {
- index = (getIndex(AudioSystem.DEVICE_OUT_DEFAULT) + 5)/10;
- }
- AudioSystem.setStreamVolumeIndex(mStreamType, index, AudioSystem.DEVICE_OUT_DEFAULT);
- // then apply device specific volumes
- Set set = mIndex.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- int device = ((Integer)entry.getKey()).intValue();
- if (device != AudioSystem.DEVICE_OUT_DEFAULT) {
- if (isMuted_syncVSS()) {
- index = 0;
- } else if (((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0 &&
- mAvrcpAbsVolSupported)
- || ((device & mFullVolumeDevices) != 0))
- {
- index = (mIndexMax + 5)/10;
- } else {
- index = ((Integer)entry.getValue() + 5)/10;
- }
- AudioSystem.setStreamVolumeIndex(mStreamType, index, device);
- }
- }
- }
- }
-
- public boolean adjustIndex(int deltaIndex, int device) {
- return setIndex(getIndex(device) + deltaIndex,
- device);
- }
-
        /**
         * Sets (and clamps) the volume index for a device, then propagates the
         * change to every stream aliased to this one (rescaled to each alias's
         * range), including the alias's own current device when applicable.
         *
         * @return true if the stored index changed
         */
        public boolean setIndex(int index, int device) {
            synchronized (VolumeStreamState.class) {
                int oldIndex = getIndex(device);
                index = getValidIndex(index);
                synchronized (mCameraSoundForced) {
                    // forced camera sound pins the enforced-system stream at max
                    if ((mStreamType == AudioSystem.STREAM_SYSTEM_ENFORCED) && mCameraSoundForced) {
                        index = mIndexMax;
                    }
                }
                mIndex.put(device, index);

                if (oldIndex != index) {
                    // Apply change to all streams using this one as alias
                    // if changing volume of current device, also change volume of current
                    // device on aliased stream
                    boolean currentDevice = (device == getDeviceForStream(mStreamType));
                    int numStreamTypes = AudioSystem.getNumStreamTypes();
                    for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
                        if (streamType != mStreamType &&
                                mStreamVolumeAlias[streamType] == mStreamType) {
                            int scaledIndex = rescaleIndex(index, mStreamType, streamType);
                            mStreamStates[streamType].setIndex(scaledIndex,
                                                               device);
                            if (currentDevice) {
                                mStreamStates[streamType].setIndex(scaledIndex,
                                                                   getDeviceForStream(streamType));
                            }
                        }
                    }
                    return true;
                } else {
                    return false;
                }
            }
        }
-
- public int getIndex(int device) {
- synchronized (VolumeStreamState.class) {
- Integer index = mIndex.get(device);
- if (index == null) {
- // there is always an entry for AudioSystem.DEVICE_OUT_DEFAULT
- index = mIndex.get(AudioSystem.DEVICE_OUT_DEFAULT);
- }
- return index.intValue();
- }
- }
-
        /** Returns this stream's maximum internal (x10) volume index. */
        public int getMaxIndex() {
            return mIndexMax;
        }
-
- public void setAllIndexes(VolumeStreamState srcStream) {
- synchronized (VolumeStreamState.class) {
- int srcStreamType = srcStream.getStreamType();
- // apply default device volume from source stream to all devices first in case
- // some devices are present in this stream state but not in source stream state
- int index = srcStream.getIndex(AudioSystem.DEVICE_OUT_DEFAULT);
- index = rescaleIndex(index, srcStreamType, mStreamType);
- Set set = mIndex.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- entry.setValue(index);
- }
- // Now apply actual volume for devices in source stream state
- set = srcStream.mIndex.entrySet();
- i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- int device = ((Integer)entry.getKey()).intValue();
- index = ((Integer)entry.getValue()).intValue();
- index = rescaleIndex(index, srcStreamType, mStreamType);
-
- setIndex(index, device);
- }
- }
- }
-
- public void setAllIndexesToMax() {
- synchronized (VolumeStreamState.class) {
- Set set = mIndex.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- entry.setValue(mIndexMax);
- }
- }
- }
-
        /**
         * Mutes or unmutes this stream on behalf of the client identified by
         * the given binder, via its per-client death handler (created on first
         * mute; an unmute with no prior mute is rejected with a log).
         */
        public void mute(IBinder cb, boolean state) {
            synchronized (VolumeStreamState.class) {
                VolumeDeathHandler handler = getDeathHandler_syncVSS(cb, state);
                if (handler == null) {
                    Log.e(TAG, "Could not get client death handler for stream: "+mStreamType);
                    return;
                }
                handler.mute_syncVSS(state);
            }
        }
-
        /** Returns the AudioSystem stream type this state tracks. */
        public int getStreamType() {
            return mStreamType;
        }
-
- public void checkFixedVolumeDevices() {
- synchronized (VolumeStreamState.class) {
- // ignore settings for fixed volume devices: volume should always be at max or 0
- if (mStreamVolumeAlias[mStreamType] == AudioSystem.STREAM_MUSIC) {
- Set set = mIndex.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- int device = ((Integer)entry.getKey()).intValue();
- int index = ((Integer)entry.getValue()).intValue();
- if (((device & mFullVolumeDevices) != 0)
- || (((device & mFixedVolumeDevices) != 0) && index != 0)) {
- entry.setValue(mIndexMax);
- }
- applyDeviceVolume_syncVSS(device);
- }
- }
- }
- }
-
- private int getValidIndex(int index) {
- if (index < 0) {
- return 0;
- } else if (mUseFixedVolume || mUseMasterVolume || index > mIndexMax) {
- return mIndexMax;
- }
-
- return index;
- }
-
        /**
         * Tracks one client's mute requests on this stream and clears them if
         * the client's binder dies. A stream is muted while any client holds a
         * non-zero mute count.
         */
        private class VolumeDeathHandler implements IBinder.DeathRecipient {
            private IBinder mICallback; // To be notified of client's death
            private int mMuteCount; // Number of active mutes for this client

            VolumeDeathHandler(IBinder cb) {
                mICallback = cb;
            }

            // must be called while synchronized VolumeStreamState.class
            /**
             * Increments (state == true) or decrements this client's mute
             * count, registering/unregistering death notification at the 0<->1
             * transitions, and triggers a volume re-apply when the stream's
             * overall muted state changed.
             */
            public void mute_syncVSS(boolean state) {
                boolean updateVolume = false;
                if (state) {
                    if (mMuteCount == 0) {
                        // Register for client death notification
                        try {
                            // mICallback can be 0 if muted by AudioService
                            if (mICallback != null) {
                                mICallback.linkToDeath(this, 0);
                            }
                            VolumeStreamState.this.mDeathHandlers.add(this);
                            // If the stream is not yet muted by any client, set level to 0
                            if (!VolumeStreamState.this.isMuted_syncVSS()) {
                                updateVolume = true;
                            }
                        } catch (RemoteException e) {
                            // Client has died!
                            binderDied();
                            return;
                        }
                    } else {
                        Log.w(TAG, "stream: "+mStreamType+" was already muted by this client");
                    }
                    mMuteCount++;
                } else {
                    if (mMuteCount == 0) {
                        Log.e(TAG, "unexpected unmute for stream: "+mStreamType);
                    } else {
                        mMuteCount--;
                        if (mMuteCount == 0) {
                            // Unregister from client death notification
                            VolumeStreamState.this.mDeathHandlers.remove(this);
                            // mICallback can be 0 if muted by AudioService
                            if (mICallback != null) {
                                mICallback.unlinkToDeath(this, 0);
                            }
                            if (!VolumeStreamState.this.isMuted_syncVSS()) {
                                updateVolume = true;
                            }
                        }
                    }
                }
                if (updateVolume) {
                    // re-apply all device volumes on the audio handler thread
                    sendMsg(mAudioHandler,
                            MSG_SET_ALL_VOLUMES,
                            SENDMSG_QUEUE,
                            0,
                            0,
                            VolumeStreamState.this, 0);
                }
            }

            /** Drops all of this client's mutes when its binder dies. */
            public void binderDied() {
                Log.w(TAG, "Volume service client died for stream: "+mStreamType);
                synchronized (VolumeStreamState.class) {
                    if (mMuteCount != 0) {
                        // Reset all active mute requests from this client.
                        mMuteCount = 1;
                        mute_syncVSS(false);
                    }
                }
            }
        }
-
- private int muteCount() {
- int count = 0;
- int size = mDeathHandlers.size();
- for (int i = 0; i < size; i++) {
- count += mDeathHandlers.get(i).mMuteCount;
- }
- return count;
- }
-
- // must be called while synchronized VolumeStreamState.class
- private boolean isMuted_syncVSS() {
- return muteCount() != 0;
- }
-
- // must be called while synchronized VolumeStreamState.class
- private VolumeDeathHandler getDeathHandler_syncVSS(IBinder cb, boolean state) {
- VolumeDeathHandler handler;
- int size = mDeathHandlers.size();
- for (int i = 0; i < size; i++) {
- handler = mDeathHandlers.get(i);
- if (cb == handler.mICallback) {
- return handler;
- }
- }
- // If this is the first mute request for this client, create a new
- // client death handler. Otherwise, it is an out of sequence unmute request.
- if (state) {
- handler = new VolumeDeathHandler(cb);
- } else {
- Log.w(TAG, "stream was not muted by this client");
- handler = null;
- }
- return handler;
- }
-
- private void dump(PrintWriter pw) {
- pw.print(" Mute count: ");
- pw.println(muteCount());
- pw.print(" Max: ");
- pw.println((mIndexMax + 5) / 10);
- pw.print(" Current: ");
- Set set = mIndex.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- final int device = (Integer) entry.getKey();
- pw.print(Integer.toHexString(device));
- final String deviceName = device == AudioSystem.DEVICE_OUT_DEFAULT ? "default"
- : AudioSystem.getOutputDeviceName(device);
- if (!deviceName.isEmpty()) {
- pw.print(" (");
- pw.print(deviceName);
- pw.print(")");
- }
- pw.print(": ");
- final int index = (((Integer) entry.getValue()) + 5) / 10;
- pw.print(index);
- if (i.hasNext()) {
- pw.print(", ");
- }
- }
- }
- }
-
- /** Thread that handles native AudioSystem control. */
- private class AudioSystemThread extends Thread {
- AudioSystemThread() {
- super("AudioService");
- }
-
- @Override
- public void run() {
- // Set this thread up so the handler will work on it
- Looper.prepare();
-
- synchronized(AudioService.this) {
- mAudioHandler = new AudioHandler();
-
- // Notify that the handler has been created
- AudioService.this.notify();
- }
-
- // Listen for volume change requests that are set by VolumePanel
- Looper.loop();
- }
- }
-
- /** Handles internal volume messages in separate volume thread. */
- private class AudioHandler extends Handler {
-
- private void setDeviceVolume(VolumeStreamState streamState, int device) {
-
- synchronized (VolumeStreamState.class) {
- // Apply volume
- streamState.applyDeviceVolume_syncVSS(device);
-
- // Apply change to all streams using this one as alias
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
- if (streamType != streamState.mStreamType &&
- mStreamVolumeAlias[streamType] == streamState.mStreamType) {
- // Make sure volume is also maxed out on A2DP device for aliased stream
- // that may have a different device selected
- int streamDevice = getDeviceForStream(streamType);
- if ((device != streamDevice) && mAvrcpAbsVolSupported &&
- ((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0)) {
- mStreamStates[streamType].applyDeviceVolume_syncVSS(device);
- }
- mStreamStates[streamType].applyDeviceVolume_syncVSS(streamDevice);
- }
- }
- }
- // Post a persist volume msg
- sendMsg(mAudioHandler,
- MSG_PERSIST_VOLUME,
- SENDMSG_QUEUE,
- device,
- 0,
- streamState,
- PERSIST_DELAY);
-
- }
-
- private void setAllVolumes(VolumeStreamState streamState) {
-
- // Apply volume
- streamState.applyAllVolumes();
-
- // Apply change to all streams using this one as alias
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
- if (streamType != streamState.mStreamType &&
- mStreamVolumeAlias[streamType] == streamState.mStreamType) {
- mStreamStates[streamType].applyAllVolumes();
- }
- }
- }
-
- private void persistVolume(VolumeStreamState streamState, int device) {
- if (mUseFixedVolume) {
- return;
- }
- if (isPlatformTelevision() && (streamState.mStreamType != AudioSystem.STREAM_MUSIC)) {
- return;
- }
- System.putIntForUser(mContentResolver,
- streamState.getSettingNameForDevice(device),
- (streamState.getIndex(device) + 5)/ 10,
- UserHandle.USER_CURRENT);
- }
-
- private void persistRingerMode(int ringerMode) {
- if (mUseFixedVolume) {
- return;
- }
- Settings.Global.putInt(mContentResolver, Settings.Global.MODE_RINGER, ringerMode);
- }
-
- private boolean onLoadSoundEffects() {
- int status;
-
- synchronized (mSoundEffectsLock) {
- if (!mSystemReady) {
- Log.w(TAG, "onLoadSoundEffects() called before boot complete");
- return false;
- }
-
- if (mSoundPool != null) {
- return true;
- }
-
- loadTouchSoundAssets();
-
- mSoundPool = new SoundPool.Builder()
- .setMaxStreams(NUM_SOUNDPOOL_CHANNELS)
- .setAudioAttributes(new AudioAttributes.Builder()
- .setUsage(AudioAttributes.USAGE_ASSISTANCE_SONIFICATION)
- .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
- .build())
- .build();
- mSoundPoolCallBack = null;
- mSoundPoolListenerThread = new SoundPoolListenerThread();
- mSoundPoolListenerThread.start();
- int attempts = 3;
- while ((mSoundPoolCallBack == null) && (attempts-- > 0)) {
- try {
- // Wait for mSoundPoolCallBack to be set by the other thread
- mSoundEffectsLock.wait(SOUND_EFFECTS_LOAD_TIMEOUT_MS);
- } catch (InterruptedException e) {
- Log.w(TAG, "Interrupted while waiting sound pool listener thread.");
- }
- }
-
- if (mSoundPoolCallBack == null) {
- Log.w(TAG, "onLoadSoundEffects() SoundPool listener or thread creation error");
- if (mSoundPoolLooper != null) {
- mSoundPoolLooper.quit();
- mSoundPoolLooper = null;
- }
- mSoundPoolListenerThread = null;
- mSoundPool.release();
- mSoundPool = null;
- return false;
- }
- /*
- * poolId table: The value -1 in this table indicates that corresponding
- * file (same index in SOUND_EFFECT_FILES[] has not been loaded.
- * Once loaded, the value in poolId is the sample ID and the same
- * sample can be reused for another effect using the same file.
- */
- int[] poolId = new int[SOUND_EFFECT_FILES.size()];
- for (int fileIdx = 0; fileIdx < SOUND_EFFECT_FILES.size(); fileIdx++) {
- poolId[fileIdx] = -1;
- }
- /*
- * Effects whose value in SOUND_EFFECT_FILES_MAP[effect][1] is -1 must be loaded.
- * If load succeeds, value in SOUND_EFFECT_FILES_MAP[effect][1] is > 0:
- * this indicates we have a valid sample loaded for this effect.
- */
-
- int numSamples = 0;
- for (int effect = 0; effect < AudioManager.NUM_SOUND_EFFECTS; effect++) {
- // Do not load sample if this effect uses the MediaPlayer
- if (SOUND_EFFECT_FILES_MAP[effect][1] == 0) {
- continue;
- }
- if (poolId[SOUND_EFFECT_FILES_MAP[effect][0]] == -1) {
- String filePath = Environment.getRootDirectory()
- + SOUND_EFFECTS_PATH
- + SOUND_EFFECT_FILES.get(SOUND_EFFECT_FILES_MAP[effect][0]);
- int sampleId = mSoundPool.load(filePath, 0);
- if (sampleId <= 0) {
- Log.w(TAG, "Soundpool could not load file: "+filePath);
- } else {
- SOUND_EFFECT_FILES_MAP[effect][1] = sampleId;
- poolId[SOUND_EFFECT_FILES_MAP[effect][0]] = sampleId;
- numSamples++;
- }
- } else {
- SOUND_EFFECT_FILES_MAP[effect][1] =
- poolId[SOUND_EFFECT_FILES_MAP[effect][0]];
- }
- }
- // wait for all samples to be loaded
- if (numSamples > 0) {
- mSoundPoolCallBack.setSamples(poolId);
-
- attempts = 3;
- status = 1;
- while ((status == 1) && (attempts-- > 0)) {
- try {
- mSoundEffectsLock.wait(SOUND_EFFECTS_LOAD_TIMEOUT_MS);
- status = mSoundPoolCallBack.status();
- } catch (InterruptedException e) {
- Log.w(TAG, "Interrupted while waiting sound pool callback.");
- }
- }
- } else {
- status = -1;
- }
-
- if (mSoundPoolLooper != null) {
- mSoundPoolLooper.quit();
- mSoundPoolLooper = null;
- }
- mSoundPoolListenerThread = null;
- if (status != 0) {
- Log.w(TAG,
- "onLoadSoundEffects(), Error "+status+ " while loading samples");
- for (int effect = 0; effect < AudioManager.NUM_SOUND_EFFECTS; effect++) {
- if (SOUND_EFFECT_FILES_MAP[effect][1] > 0) {
- SOUND_EFFECT_FILES_MAP[effect][1] = -1;
- }
- }
-
- mSoundPool.release();
- mSoundPool = null;
- }
- }
- return (status == 0);
- }
-
- /**
- * Unloads samples from the sound pool.
- * This method can be called to free some memory when
- * sound effects are disabled.
- */
- private void onUnloadSoundEffects() {
- synchronized (mSoundEffectsLock) {
- if (mSoundPool == null) {
- return;
- }
-
- int[] poolId = new int[SOUND_EFFECT_FILES.size()];
- for (int fileIdx = 0; fileIdx < SOUND_EFFECT_FILES.size(); fileIdx++) {
- poolId[fileIdx] = 0;
- }
-
- for (int effect = 0; effect < AudioManager.NUM_SOUND_EFFECTS; effect++) {
- if (SOUND_EFFECT_FILES_MAP[effect][1] <= 0) {
- continue;
- }
- if (poolId[SOUND_EFFECT_FILES_MAP[effect][0]] == 0) {
- mSoundPool.unload(SOUND_EFFECT_FILES_MAP[effect][1]);
- SOUND_EFFECT_FILES_MAP[effect][1] = -1;
- poolId[SOUND_EFFECT_FILES_MAP[effect][0]] = -1;
- }
- }
- mSoundPool.release();
- mSoundPool = null;
- }
- }
-
- private void onPlaySoundEffect(int effectType, int volume) {
- synchronized (mSoundEffectsLock) {
-
- onLoadSoundEffects();
-
- if (mSoundPool == null) {
- return;
- }
- float volFloat;
- // use default if volume is not specified by caller
- if (volume < 0) {
- volFloat = (float)Math.pow(10, (float)sSoundEffectVolumeDb/20);
- } else {
- volFloat = volume / 1000.0f;
- }
-
- if (SOUND_EFFECT_FILES_MAP[effectType][1] > 0) {
- mSoundPool.play(SOUND_EFFECT_FILES_MAP[effectType][1],
- volFloat, volFloat, 0, 0, 1.0f);
- } else {
- MediaPlayer mediaPlayer = new MediaPlayer();
- try {
- String filePath = Environment.getRootDirectory() + SOUND_EFFECTS_PATH +
- SOUND_EFFECT_FILES.get(SOUND_EFFECT_FILES_MAP[effectType][0]);
- mediaPlayer.setDataSource(filePath);
- mediaPlayer.setAudioStreamType(AudioSystem.STREAM_SYSTEM);
- mediaPlayer.prepare();
- mediaPlayer.setVolume(volFloat);
- mediaPlayer.setOnCompletionListener(new OnCompletionListener() {
- public void onCompletion(MediaPlayer mp) {
- cleanupPlayer(mp);
- }
- });
- mediaPlayer.setOnErrorListener(new OnErrorListener() {
- public boolean onError(MediaPlayer mp, int what, int extra) {
- cleanupPlayer(mp);
- return true;
- }
- });
- mediaPlayer.start();
- } catch (IOException ex) {
- Log.w(TAG, "MediaPlayer IOException: "+ex);
- } catch (IllegalArgumentException ex) {
- Log.w(TAG, "MediaPlayer IllegalArgumentException: "+ex);
- } catch (IllegalStateException ex) {
- Log.w(TAG, "MediaPlayer IllegalStateException: "+ex);
- }
- }
- }
- }
-
- private void cleanupPlayer(MediaPlayer mp) {
- if (mp != null) {
- try {
- mp.stop();
- mp.release();
- } catch (IllegalStateException ex) {
- Log.w(TAG, "MediaPlayer IllegalStateException: "+ex);
- }
- }
- }
-
- private void setForceUse(int usage, int config) {
- AudioSystem.setForceUse(usage, config);
- }
-
- private void onPersistSafeVolumeState(int state) {
- Settings.Global.putInt(mContentResolver,
- Settings.Global.AUDIO_SAFE_VOLUME_STATE,
- state);
- }
-
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
-
- case MSG_SET_DEVICE_VOLUME:
- setDeviceVolume((VolumeStreamState) msg.obj, msg.arg1);
- break;
-
- case MSG_SET_ALL_VOLUMES:
- setAllVolumes((VolumeStreamState) msg.obj);
- break;
-
- case MSG_PERSIST_VOLUME:
- persistVolume((VolumeStreamState) msg.obj, msg.arg1);
- break;
-
- case MSG_PERSIST_MASTER_VOLUME:
- if (mUseFixedVolume) {
- return;
- }
- Settings.System.putFloatForUser(mContentResolver,
- Settings.System.VOLUME_MASTER,
- msg.arg1 / (float)1000.0,
- UserHandle.USER_CURRENT);
- break;
-
- case MSG_PERSIST_MASTER_VOLUME_MUTE:
- if (mUseFixedVolume) {
- return;
- }
- Settings.System.putIntForUser(mContentResolver,
- Settings.System.VOLUME_MASTER_MUTE,
- msg.arg1,
- msg.arg2);
- break;
-
- case MSG_PERSIST_RINGER_MODE:
- // note that the value persisted is the current ringer mode, not the
- // value of ringer mode as of the time the request was made to persist
- persistRingerMode(getRingerModeInternal());
- break;
-
- case MSG_MEDIA_SERVER_DIED:
- if (!mSystemReady ||
- (AudioSystem.checkAudioFlinger() != AudioSystem.AUDIO_STATUS_OK)) {
- Log.e(TAG, "Media server died.");
- sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SENDMSG_NOOP, 0, 0,
- null, 500);
- break;
- }
- Log.e(TAG, "Media server started.");
-
- // indicate to audio HAL that we start the reconfiguration phase after a media
- // server crash
- // Note that we only execute this when the media server
- // process restarts after a crash, not the first time it is started.
- AudioSystem.setParameters("restarting=true");
-
- readAndSetLowRamDevice();
-
- // Restore device connection states
- synchronized (mConnectedDevices) {
- Set set = mConnectedDevices.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry device = (Map.Entry)i.next();
- AudioSystem.setDeviceConnectionState(
- ((Integer)device.getKey()).intValue(),
- AudioSystem.DEVICE_STATE_AVAILABLE,
- (String)device.getValue());
- }
- }
- // Restore call state
- AudioSystem.setPhoneState(mMode);
-
- // Restore forced usage for communcations and record
- AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, mForcedUseForComm);
- AudioSystem.setForceUse(AudioSystem.FOR_RECORD, mForcedUseForComm);
- AudioSystem.setForceUse(AudioSystem.FOR_SYSTEM, mCameraSoundForced ?
- AudioSystem.FORCE_SYSTEM_ENFORCED : AudioSystem.FORCE_NONE);
-
- // Restore stream volumes
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
- VolumeStreamState streamState = mStreamStates[streamType];
- AudioSystem.initStreamVolume(streamType, 0, (streamState.mIndexMax + 5) / 10);
-
- streamState.applyAllVolumes();
- }
-
- // Restore ringer mode
- setRingerModeInt(getRingerModeInternal(), false);
-
- // Restore master volume
- restoreMasterVolume();
-
- // Reset device orientation (if monitored for this device)
- if (mMonitorOrientation) {
- setOrientationForAudioSystem();
- }
- if (mMonitorRotation) {
- setRotationForAudioSystem();
- }
-
- synchronized (mBluetoothA2dpEnabledLock) {
- AudioSystem.setForceUse(AudioSystem.FOR_MEDIA,
- mBluetoothA2dpEnabled ?
- AudioSystem.FORCE_NONE : AudioSystem.FORCE_NO_BT_A2DP);
- }
-
- synchronized (mSettingsLock) {
- AudioSystem.setForceUse(AudioSystem.FOR_DOCK,
- mDockAudioMediaEnabled ?
- AudioSystem.FORCE_ANALOG_DOCK : AudioSystem.FORCE_NONE);
- }
- if (mHdmiManager != null) {
- synchronized (mHdmiManager) {
- if (mHdmiTvClient != null) {
- setHdmiSystemAudioSupported(mHdmiSystemAudioSupported);
- }
- }
- }
-
- synchronized (mAudioPolicies) {
- for(AudioPolicyProxy policy : mAudioPolicies.values()) {
- policy.connectMixes();
- }
- }
-
- // indicate the end of reconfiguration phase to audio HAL
- AudioSystem.setParameters("restarting=false");
- break;
-
- case MSG_UNLOAD_SOUND_EFFECTS:
- onUnloadSoundEffects();
- break;
-
- case MSG_LOAD_SOUND_EFFECTS:
- //FIXME: onLoadSoundEffects() should be executed in a separate thread as it
- // can take several dozens of milliseconds to complete
- boolean loaded = onLoadSoundEffects();
- if (msg.obj != null) {
- LoadSoundEffectReply reply = (LoadSoundEffectReply)msg.obj;
- synchronized (reply) {
- reply.mStatus = loaded ? 0 : -1;
- reply.notify();
- }
- }
- break;
-
- case MSG_PLAY_SOUND_EFFECT:
- onPlaySoundEffect(msg.arg1, msg.arg2);
- break;
-
- case MSG_BTA2DP_DOCK_TIMEOUT:
- // msg.obj == address of BTA2DP device
- synchronized (mConnectedDevices) {
- makeA2dpDeviceUnavailableNow( (String) msg.obj );
- }
- break;
-
- case MSG_SET_FORCE_USE:
- case MSG_SET_FORCE_BT_A2DP_USE:
- setForceUse(msg.arg1, msg.arg2);
- break;
-
- case MSG_BT_HEADSET_CNCT_FAILED:
- resetBluetoothSco();
- break;
-
- case MSG_SET_WIRED_DEVICE_CONNECTION_STATE:
- onSetWiredDeviceConnectionState(msg.arg1, msg.arg2, (String)msg.obj);
- mAudioEventWakeLock.release();
- break;
-
- case MSG_SET_A2DP_SRC_CONNECTION_STATE:
- onSetA2dpSourceConnectionState((BluetoothDevice)msg.obj, msg.arg1);
- mAudioEventWakeLock.release();
- break;
-
- case MSG_SET_A2DP_SINK_CONNECTION_STATE:
- onSetA2dpSinkConnectionState((BluetoothDevice)msg.obj, msg.arg1);
- mAudioEventWakeLock.release();
- break;
-
- case MSG_REPORT_NEW_ROUTES: {
- int N = mRoutesObservers.beginBroadcast();
- if (N > 0) {
- AudioRoutesInfo routes;
- synchronized (mCurAudioRoutes) {
- routes = new AudioRoutesInfo(mCurAudioRoutes);
- }
- while (N > 0) {
- N--;
- IAudioRoutesObserver obs = mRoutesObservers.getBroadcastItem(N);
- try {
- obs.dispatchAudioRoutesChanged(routes);
- } catch (RemoteException e) {
- }
- }
- }
- mRoutesObservers.finishBroadcast();
- break;
- }
-
- case MSG_CHECK_MUSIC_ACTIVE:
- onCheckMusicActive();
- break;
-
- case MSG_BROADCAST_AUDIO_BECOMING_NOISY:
- onSendBecomingNoisyIntent();
- break;
-
- case MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED:
- case MSG_CONFIGURE_SAFE_MEDIA_VOLUME:
- onConfigureSafeVolume((msg.what == MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED));
- break;
- case MSG_PERSIST_SAFE_VOLUME_STATE:
- onPersistSafeVolumeState(msg.arg1);
- break;
-
- case MSG_BROADCAST_BT_CONNECTION_STATE:
- onBroadcastScoConnectionState(msg.arg1);
- break;
-
- case MSG_SYSTEM_READY:
- onSystemReady();
- break;
-
- case MSG_PERSIST_MUSIC_ACTIVE_MS:
- final int musicActiveMs = msg.arg1;
- Settings.Secure.putIntForUser(mContentResolver,
- Settings.Secure.UNSAFE_VOLUME_MUSIC_ACTIVE_MS, musicActiveMs,
- UserHandle.USER_CURRENT);
- break;
- case MSG_PERSIST_MICROPHONE_MUTE:
- Settings.System.putIntForUser(mContentResolver,
- Settings.System.MICROPHONE_MUTE,
- msg.arg1,
- msg.arg2);
- break;
- }
- }
- }
-
- private class SettingsObserver extends ContentObserver {
-
- SettingsObserver() {
- super(new Handler());
- mContentResolver.registerContentObserver(Settings.System.getUriFor(
- Settings.System.MODE_RINGER_STREAMS_AFFECTED), false, this);
- mContentResolver.registerContentObserver(Settings.Global.getUriFor(
- Settings.Global.DOCK_AUDIO_MEDIA_ENABLED), false, this);
- }
-
- @Override
- public void onChange(boolean selfChange) {
- super.onChange(selfChange);
- // FIXME This synchronized is not necessary if mSettingsLock only protects mRingerMode.
- // However there appear to be some missing locks around mRingerModeMutedStreams
- // and mRingerModeAffectedStreams, so will leave this synchronized for now.
- // mRingerModeMutedStreams and mMuteAffectedStreams are safe (only accessed once).
- synchronized (mSettingsLock) {
- if (updateRingerModeAffectedStreams()) {
- /*
- * Ensure all stream types that should be affected by ringer mode
- * are in the proper state.
- */
- setRingerModeInt(getRingerModeInternal(), false);
- }
- readDockAudioSettings(mContentResolver);
- }
- }
- }
-
- // must be called synchronized on mConnectedDevices
- private void makeA2dpDeviceAvailable(String address) {
- // enable A2DP before notifying A2DP connection to avoid unecessary processing in
- // audio policy manager
- VolumeStreamState streamState = mStreamStates[AudioSystem.STREAM_MUSIC];
- sendMsg(mAudioHandler, MSG_SET_DEVICE_VOLUME, SENDMSG_QUEUE,
- AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, 0, streamState, 0);
- setBluetoothA2dpOnInt(true);
- AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
- AudioSystem.DEVICE_STATE_AVAILABLE,
- address);
- // Reset A2DP suspend state each time a new sink is connected
- AudioSystem.setParameters("A2dpSuspended=false");
- mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP),
- address);
- }
-
- private void onSendBecomingNoisyIntent() {
- sendBroadcastToAll(new Intent(AudioManager.ACTION_AUDIO_BECOMING_NOISY));
- }
-
- // must be called synchronized on mConnectedDevices
- private void makeA2dpDeviceUnavailableNow(String address) {
- synchronized (mA2dpAvrcpLock) {
- mAvrcpAbsVolSupported = false;
- }
- AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
- AudioSystem.DEVICE_STATE_UNAVAILABLE,
- address);
- mConnectedDevices.remove(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP);
- synchronized (mCurAudioRoutes) {
- // Remove A2DP routes as well
- if (mCurAudioRoutes.mBluetoothName != null) {
- mCurAudioRoutes.mBluetoothName = null;
- sendMsg(mAudioHandler, MSG_REPORT_NEW_ROUTES,
- SENDMSG_NOOP, 0, 0, null, 0);
- }
- }
- }
-
- // must be called synchronized on mConnectedDevices
- private void makeA2dpDeviceUnavailableLater(String address) {
- // prevent any activity on the A2DP audio output to avoid unwanted
- // reconnection of the sink.
- AudioSystem.setParameters("A2dpSuspended=true");
- // the device will be made unavailable later, so consider it disconnected right away
- mConnectedDevices.remove(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP);
- // send the delayed message to make the device unavailable later
- Message msg = mAudioHandler.obtainMessage(MSG_BTA2DP_DOCK_TIMEOUT, address);
- mAudioHandler.sendMessageDelayed(msg, BTA2DP_DOCK_TIMEOUT_MILLIS);
-
- }
-
- // must be called synchronized on mConnectedDevices
- private void makeA2dpSrcAvailable(String address) {
- AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP,
- AudioSystem.DEVICE_STATE_AVAILABLE,
- address);
- mConnectedDevices.put( new Integer(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP),
- address);
- }
-
- // must be called synchronized on mConnectedDevices
- private void makeA2dpSrcUnavailable(String address) {
- AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP,
- AudioSystem.DEVICE_STATE_UNAVAILABLE,
- address);
- mConnectedDevices.remove(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP);
- }
-
- // must be called synchronized on mConnectedDevices
- private void cancelA2dpDeviceTimeout() {
- mAudioHandler.removeMessages(MSG_BTA2DP_DOCK_TIMEOUT);
- }
-
- // must be called synchronized on mConnectedDevices
- private boolean hasScheduledA2dpDockTimeout() {
- return mAudioHandler.hasMessages(MSG_BTA2DP_DOCK_TIMEOUT);
- }
-
- private void onSetA2dpSinkConnectionState(BluetoothDevice btDevice, int state)
- {
- if (DEBUG_VOL) {
- Log.d(TAG, "onSetA2dpSinkConnectionState btDevice="+btDevice+"state="+state);
- }
- if (btDevice == null) {
- return;
- }
- String address = btDevice.getAddress();
- if (!BluetoothAdapter.checkBluetoothAddress(address)) {
- address = "";
- }
-
- synchronized (mConnectedDevices) {
- boolean isConnected =
- (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) &&
- mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP).equals(address));
-
- if (isConnected && state != BluetoothProfile.STATE_CONNECTED) {
- if (btDevice.isBluetoothDock()) {
- if (state == BluetoothProfile.STATE_DISCONNECTED) {
- // introduction of a delay for transient disconnections of docks when
- // power is rapidly turned off/on, this message will be canceled if
- // we reconnect the dock under a preset delay
- makeA2dpDeviceUnavailableLater(address);
- // the next time isConnected is evaluated, it will be false for the dock
- }
- } else {
- makeA2dpDeviceUnavailableNow(address);
- }
- synchronized (mCurAudioRoutes) {
- if (mCurAudioRoutes.mBluetoothName != null) {
- mCurAudioRoutes.mBluetoothName = null;
- sendMsg(mAudioHandler, MSG_REPORT_NEW_ROUTES,
- SENDMSG_NOOP, 0, 0, null, 0);
- }
- }
- } else if (!isConnected && state == BluetoothProfile.STATE_CONNECTED) {
- if (btDevice.isBluetoothDock()) {
- // this could be a reconnection after a transient disconnection
- cancelA2dpDeviceTimeout();
- mDockAddress = address;
- } else {
- // this could be a connection of another A2DP device before the timeout of
- // a dock: cancel the dock timeout, and make the dock unavailable now
- if(hasScheduledA2dpDockTimeout()) {
- cancelA2dpDeviceTimeout();
- makeA2dpDeviceUnavailableNow(mDockAddress);
- }
- }
- makeA2dpDeviceAvailable(address);
- synchronized (mCurAudioRoutes) {
- String name = btDevice.getAliasName();
- if (!TextUtils.equals(mCurAudioRoutes.mBluetoothName, name)) {
- mCurAudioRoutes.mBluetoothName = name;
- sendMsg(mAudioHandler, MSG_REPORT_NEW_ROUTES,
- SENDMSG_NOOP, 0, 0, null, 0);
- }
- }
- }
- }
- }
-
- private void onSetA2dpSourceConnectionState(BluetoothDevice btDevice, int state)
- {
- if (DEBUG_VOL) {
- Log.d(TAG, "onSetA2dpSourceConnectionState btDevice="+btDevice+" state="+state);
- }
- if (btDevice == null) {
- return;
- }
- String address = btDevice.getAddress();
- if (!BluetoothAdapter.checkBluetoothAddress(address)) {
- address = "";
- }
-
- synchronized (mConnectedDevices) {
- boolean isConnected =
- (mConnectedDevices.containsKey(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP) &&
- mConnectedDevices.get(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP).equals(address));
-
- if (isConnected && state != BluetoothProfile.STATE_CONNECTED) {
- makeA2dpSrcUnavailable(address);
- } else if (!isConnected && state == BluetoothProfile.STATE_CONNECTED) {
- makeA2dpSrcAvailable(address);
- }
- }
- }
-
- public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
- // address is not used for now, but may be used when multiple a2dp devices are supported
- synchronized (mA2dpAvrcpLock) {
- mAvrcpAbsVolSupported = support;
- sendMsg(mAudioHandler, MSG_SET_DEVICE_VOLUME, SENDMSG_QUEUE,
- AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, 0,
- mStreamStates[AudioSystem.STREAM_MUSIC], 0);
- sendMsg(mAudioHandler, MSG_SET_DEVICE_VOLUME, SENDMSG_QUEUE,
- AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, 0,
- mStreamStates[AudioSystem.STREAM_RING], 0);
- }
- }
-
- private boolean handleDeviceConnection(boolean connected, int device, String params) {
- synchronized (mConnectedDevices) {
- boolean isConnected = (mConnectedDevices.containsKey(device) &&
- (params.isEmpty() || mConnectedDevices.get(device).equals(params)));
-
- if (isConnected && !connected) {
- AudioSystem.setDeviceConnectionState(device,
- AudioSystem.DEVICE_STATE_UNAVAILABLE,
- mConnectedDevices.get(device));
- mConnectedDevices.remove(device);
- return true;
- } else if (!isConnected && connected) {
- AudioSystem.setDeviceConnectionState(device,
- AudioSystem.DEVICE_STATE_AVAILABLE,
- params);
- mConnectedDevices.put(new Integer(device), params);
- return true;
- }
- }
- return false;
- }
-
- // Devices which removal triggers intent ACTION_AUDIO_BECOMING_NOISY. The intent is only
- // sent if none of these devices is connected.
- int mBecomingNoisyIntentDevices =
- AudioSystem.DEVICE_OUT_WIRED_HEADSET | AudioSystem.DEVICE_OUT_WIRED_HEADPHONE |
- AudioSystem.DEVICE_OUT_ALL_A2DP | AudioSystem.DEVICE_OUT_HDMI |
- AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET | AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET |
- AudioSystem.DEVICE_OUT_ALL_USB | AudioSystem.DEVICE_OUT_LINE;
-
- // must be called before removing the device from mConnectedDevices
- private int checkSendBecomingNoisyIntent(int device, int state) {
- int delay = 0;
- if ((state == 0) && ((device & mBecomingNoisyIntentDevices) != 0)) {
- int devices = 0;
- for (int dev : mConnectedDevices.keySet()) {
- if (((dev & AudioSystem.DEVICE_BIT_IN) == 0) &&
- ((dev & mBecomingNoisyIntentDevices) != 0)) {
- devices |= dev;
- }
- }
- if (devices == device) {
- sendMsg(mAudioHandler,
- MSG_BROADCAST_AUDIO_BECOMING_NOISY,
- SENDMSG_REPLACE,
- 0,
- 0,
- null,
- 0);
- delay = 1000;
- }
- }
-
- if (mAudioHandler.hasMessages(MSG_SET_A2DP_SRC_CONNECTION_STATE) ||
- mAudioHandler.hasMessages(MSG_SET_A2DP_SINK_CONNECTION_STATE) ||
- mAudioHandler.hasMessages(MSG_SET_WIRED_DEVICE_CONNECTION_STATE)) {
- synchronized (mLastDeviceConnectMsgTime) {
- long time = SystemClock.uptimeMillis();
- if (mLastDeviceConnectMsgTime > time) {
- delay = (int)(mLastDeviceConnectMsgTime - time) + 30;
- }
- }
- }
- return delay;
- }
-
- private void sendDeviceConnectionIntent(int device, int state, String name)
- {
- Intent intent = new Intent();
-
- intent.putExtra("state", state);
- intent.putExtra("name", name);
- intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY);
-
- int connType = 0;
-
- if (device == AudioSystem.DEVICE_OUT_WIRED_HEADSET) {
- connType = AudioRoutesInfo.MAIN_HEADSET;
- intent.setAction(Intent.ACTION_HEADSET_PLUG);
- intent.putExtra("microphone", 1);
- } else if (device == AudioSystem.DEVICE_OUT_WIRED_HEADPHONE ||
- device == AudioSystem.DEVICE_OUT_LINE) {
- /*do apps care about line-out vs headphones?*/
- connType = AudioRoutesInfo.MAIN_HEADPHONES;
- intent.setAction(Intent.ACTION_HEADSET_PLUG);
- intent.putExtra("microphone", 0);
- } else if (device == AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET) {
- connType = AudioRoutesInfo.MAIN_DOCK_SPEAKERS;
- intent.setAction(AudioManager.ACTION_ANALOG_AUDIO_DOCK_PLUG);
- } else if (device == AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET) {
- connType = AudioRoutesInfo.MAIN_DOCK_SPEAKERS;
- intent.setAction(AudioManager.ACTION_DIGITAL_AUDIO_DOCK_PLUG);
- } else if (device == AudioSystem.DEVICE_OUT_HDMI ||
- device == AudioSystem.DEVICE_OUT_HDMI_ARC) {
- connType = AudioRoutesInfo.MAIN_HDMI;
- configureHdmiPlugIntent(intent, state);
- }
-
- synchronized (mCurAudioRoutes) {
- if (connType != 0) {
- int newConn = mCurAudioRoutes.mMainType;
- if (state != 0) {
- newConn |= connType;
- } else {
- newConn &= ~connType;
- }
- if (newConn != mCurAudioRoutes.mMainType) {
- mCurAudioRoutes.mMainType = newConn;
- sendMsg(mAudioHandler, MSG_REPORT_NEW_ROUTES,
- SENDMSG_NOOP, 0, 0, null, 0);
- }
- }
- }
-
- final long ident = Binder.clearCallingIdentity();
- try {
- ActivityManagerNative.broadcastStickyIntent(intent, null, UserHandle.USER_ALL);
- } finally {
- Binder.restoreCallingIdentity(ident);
- }
- }
-
    /**
     * Applies a wired device (dis)connection on the audio handler thread:
     * updates the A2DP override, notifies the native layer, (re)arms the
     * safe-media-volume poll, tracks HDMI CEC sink state and finally
     * broadcasts the matching device plug intent.
     *
     * @param device AudioSystem.DEVICE_* code of the (dis)connected device
     * @param state 1 when connected, 0 when disconnected
     * @param name device address/name; only forwarded to the native layer
     *             for USB devices (empty string otherwise)
     */
    private void onSetWiredDeviceConnectionState(int device, int state, String name)
    {
        synchronized (mConnectedDevices) {
            // Unplugging a wired headset/headphone/line-out re-enables A2DP
            // so media can route back to the Bluetooth device.
            if ((state == 0) && ((device == AudioSystem.DEVICE_OUT_WIRED_HEADSET) ||
                    (device == AudioSystem.DEVICE_OUT_WIRED_HEADPHONE) ||
                    (device == AudioSystem.DEVICE_OUT_LINE))) {
                setBluetoothA2dpOnInt(true);
            }
            // USB devices are recognized by their output or input USB device masks.
            boolean isUsb = ((device & ~AudioSystem.DEVICE_OUT_ALL_USB) == 0) ||
                            (((device & AudioSystem.DEVICE_BIT_IN) != 0) &&
                             ((device & ~AudioSystem.DEVICE_IN_ALL_USB) == 0));
            handleDeviceConnection((state == 1), device, (isUsb ? name : ""));
            if (state != 0) {
                // Plugging a wired headset/headphone/line-out overrides A2DP routing.
                if ((device == AudioSystem.DEVICE_OUT_WIRED_HEADSET) ||
                    (device == AudioSystem.DEVICE_OUT_WIRED_HEADPHONE) ||
                    (device == AudioSystem.DEVICE_OUT_LINE)) {
                    setBluetoothA2dpOnInt(false);
                }
                if ((device & mSafeMediaVolumeDevices) != 0) {
                    // Start polling music activity so safe volume can be
                    // re-enabled after prolonged listening (see mMusicActiveMs).
                    sendMsg(mAudioHandler,
                            MSG_CHECK_MUSIC_ACTIVE,
                            SENDMSG_REPLACE,
                            0,
                            0,
                            null,
                            MUSIC_ACTIVE_POLL_PERIOD_MS);
                }
                // Television devices without CEC service apply software volume on HDMI output
                if (isPlatformTelevision() && ((device & AudioSystem.DEVICE_OUT_HDMI) != 0)) {
                    mFixedVolumeDevices |= AudioSystem.DEVICE_OUT_HDMI;
                    checkAllFixedVolumeDevices();
                    if (mHdmiManager != null) {
                        synchronized (mHdmiManager) {
                            if (mHdmiPlaybackClient != null) {
                                // Assume no CEC sink until the async query answers.
                                mHdmiCecSink = false;
                                mHdmiPlaybackClient.queryDisplayStatus(mHdmiDisplayStatusCallback);
                            }
                        }
                    }
                }
            } else {
                if (isPlatformTelevision() && ((device & AudioSystem.DEVICE_OUT_HDMI) != 0)) {
                    if (mHdmiManager != null) {
                        synchronized (mHdmiManager) {
                            mHdmiCecSink = false;
                        }
                    }
                }
            }
            // No plug intent here for USB devices or the wired headset input
            // (its state is covered by the matching output device broadcast).
            if (!isUsb && (device != AudioSystem.DEVICE_IN_WIRED_HEADSET)) {
                sendDeviceConnectionIntent(device, state, name);
            }
        }
    }
-
- private void configureHdmiPlugIntent(Intent intent, int state) {
- intent.setAction(AudioManager.ACTION_HDMI_AUDIO_PLUG);
- intent.putExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, state);
- if (state == 1) {
- ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
- int[] portGeneration = new int[1];
- int status = AudioSystem.listAudioPorts(ports, portGeneration);
- if (status == AudioManager.SUCCESS) {
- for (AudioPort port : ports) {
- if (port instanceof AudioDevicePort) {
- final AudioDevicePort devicePort = (AudioDevicePort) port;
- if (devicePort.type() == AudioManager.DEVICE_OUT_HDMI ||
- devicePort.type() == AudioManager.DEVICE_OUT_HDMI_ARC) {
- // format the list of supported encodings
- int[] formats = devicePort.formats();
- if (formats.length > 0) {
- ArrayList<Integer> encodingList = new ArrayList(1);
- for (int format : formats) {
- // a format in the list can be 0, skip it
- if (format != AudioFormat.ENCODING_INVALID) {
- encodingList.add(format);
- }
- }
- int[] encodingArray = new int[encodingList.size()];
- for (int i = 0 ; i < encodingArray.length ; i++) {
- encodingArray[i] = encodingList.get(i);
- }
- intent.putExtra(AudioManager.EXTRA_ENCODINGS, encodingArray);
- }
- // find the maximum supported number of channels
- int maxChannels = 0;
- for (int mask : devicePort.channelMasks()) {
- int channelCount = AudioFormat.channelCountFromOutChannelMask(mask);
- if (channelCount > maxChannels) {
- maxChannels = channelCount;
- }
- }
- intent.putExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, maxChannels);
- }
- }
- }
- }
- }
- }
-
    /* cache of the address of the last dock the device was connected to,
       used to tell a dock disconnection from a regular A2DP disconnection */
    private String mDockAddress;
-
    /**
     * Receiver for the broadcasts AudioService reacts to: dock events,
     * Bluetooth SCO headset connection and audio state, USB audio
     * accessory/device plug events, screen on/off, configuration changes
     * and user switches.
     */
    private class AudioServiceBroadcastReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();
            int outDevice;
            int inDevice;
            int state;

            if (action.equals(Intent.ACTION_DOCK_EVENT)) {
                int dockState = intent.getIntExtra(Intent.EXTRA_DOCK_STATE,
                        Intent.EXTRA_DOCK_STATE_UNDOCKED);
                int config;
                // Map the dock type to the matching audio force-use configuration.
                switch (dockState) {
                    case Intent.EXTRA_DOCK_STATE_DESK:
                        config = AudioSystem.FORCE_BT_DESK_DOCK;
                        break;
                    case Intent.EXTRA_DOCK_STATE_CAR:
                        config = AudioSystem.FORCE_BT_CAR_DOCK;
                        break;
                    case Intent.EXTRA_DOCK_STATE_LE_DESK:
                        config = AudioSystem.FORCE_ANALOG_DOCK;
                        break;
                    case Intent.EXTRA_DOCK_STATE_HE_DESK:
                        config = AudioSystem.FORCE_DIGITAL_DOCK;
                        break;
                    case Intent.EXTRA_DOCK_STATE_UNDOCKED:
                    default:
                        config = AudioSystem.FORCE_NONE;
                }
                // Low end docks have a menu to enable or disable audio
                // (see mDockAudioMediaEnabled); skip forcing for LE docks and
                // for undocking from an LE dock.
                if (!((dockState == Intent.EXTRA_DOCK_STATE_LE_DESK) ||
                      ((dockState == Intent.EXTRA_DOCK_STATE_UNDOCKED) &&
                       (mDockState == Intent.EXTRA_DOCK_STATE_LE_DESK)))) {
                    AudioSystem.setForceUse(AudioSystem.FOR_DOCK, config);
                }
                mDockState = dockState;
            } else if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
                state = intent.getIntExtra(BluetoothProfile.EXTRA_STATE,
                                           BluetoothProfile.STATE_DISCONNECTED);
                outDevice = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO;
                inDevice = AudioSystem.DEVICE_IN_BLUETOOTH_SCO_HEADSET;
                String address = null;

                BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                if (btDevice == null) {
                    return;
                }

                address = btDevice.getAddress();
                // Refine the output device type from the Bluetooth device class.
                BluetoothClass btClass = btDevice.getBluetoothClass();
                if (btClass != null) {
                    switch (btClass.getDeviceClass()) {
                    case BluetoothClass.Device.AUDIO_VIDEO_WEARABLE_HEADSET:
                    case BluetoothClass.Device.AUDIO_VIDEO_HANDSFREE:
                        outDevice = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_HEADSET;
                        break;
                    case BluetoothClass.Device.AUDIO_VIDEO_CAR_AUDIO:
                        outDevice = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
                        break;
                    }
                }

                if (!BluetoothAdapter.checkBluetoothAddress(address)) {
                    address = "";
                }

                boolean connected = (state == BluetoothProfile.STATE_CONNECTED);
                // Both the SCO output and input devices must be updated together.
                boolean success = handleDeviceConnection(connected, outDevice, address) &&
                                      handleDeviceConnection(connected, inDevice, address);
                if (success) {
                    synchronized (mScoClients) {
                        if (connected) {
                            mBluetoothHeadsetDevice = btDevice;
                        } else {
                            mBluetoothHeadsetDevice = null;
                            resetBluetoothSco();
                        }
                    }
                }
            } else if (action.equals(AudioManager.ACTION_USB_AUDIO_ACCESSORY_PLUG)) {
                state = intent.getIntExtra("state", 0);

                int alsaCard = intent.getIntExtra("card", -1);
                int alsaDevice = intent.getIntExtra("device", -1);

                // ALSA card/device address forwarded to the native layer.
                String params = (alsaCard == -1 && alsaDevice == -1 ? ""
                                    : "card=" + alsaCard + ";device=" + alsaDevice);

                // Playback Device
                outDevice = AudioSystem.DEVICE_OUT_USB_ACCESSORY;
                setWiredDeviceConnectionState(outDevice, state, params);
            } else if (action.equals(AudioManager.ACTION_USB_AUDIO_DEVICE_PLUG)) {
                // FIXME Does not yet handle the case where the setting is changed
                // after device connection.  Ideally we should handle the settings change
                // in SettingsObserver. Here we should log that a USB device is connected
                // and disconnected with its address (card , device) and force the
                // connection or disconnection when the setting changes.
                int isDisabled = Settings.Secure.getInt(mContentResolver,
                        Settings.Secure.USB_AUDIO_AUTOMATIC_ROUTING_DISABLED, 0);
                if (isDisabled != 0) {
                    return;
                }

                state = intent.getIntExtra("state", 0);

                int alsaCard = intent.getIntExtra("card", -1);
                int alsaDevice = intent.getIntExtra("device", -1);
                boolean hasPlayback = intent.getBooleanExtra("hasPlayback", false);
                boolean hasCapture = intent.getBooleanExtra("hasCapture", false);
                boolean hasMIDI = intent.getBooleanExtra("hasMIDI", false);

                String params = (alsaCard == -1 && alsaDevice == -1 ? ""
                                    : "card=" + alsaCard + ";device=" + alsaDevice);

                // Playback Device
                if (hasPlayback) {
                    outDevice = AudioSystem.DEVICE_OUT_USB_DEVICE;
                    setWiredDeviceConnectionState(outDevice, state, params);
                }

                // Capture Device
                if (hasCapture) {
                    inDevice = AudioSystem.DEVICE_IN_USB_DEVICE;
                    setWiredDeviceConnectionState(inDevice, state, params);
                }
            } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
                boolean broadcast = false;
                int scoAudioState = AudioManager.SCO_AUDIO_STATE_ERROR;
                synchronized (mScoClients) {
                    int btState = intent.getIntExtra(BluetoothProfile.EXTRA_STATE, -1);
                    // broadcast intent if the connection was initated by AudioService
                    if (!mScoClients.isEmpty() &&
                            (mScoAudioState == SCO_STATE_ACTIVE_INTERNAL ||
                             mScoAudioState == SCO_STATE_ACTIVATE_REQ ||
                             mScoAudioState == SCO_STATE_DEACTIVATE_REQ)) {
                        broadcast = true;
                    }
                    switch (btState) {
                    case BluetoothHeadset.STATE_AUDIO_CONNECTED:
                        scoAudioState = AudioManager.SCO_AUDIO_STATE_CONNECTED;
                        if (mScoAudioState != SCO_STATE_ACTIVE_INTERNAL &&
                            mScoAudioState != SCO_STATE_DEACTIVATE_REQ &&
                            mScoAudioState != SCO_STATE_DEACTIVATE_EXT_REQ) {
                            mScoAudioState = SCO_STATE_ACTIVE_EXTERNAL;
                        }
                        break;
                    case BluetoothHeadset.STATE_AUDIO_DISCONNECTED:
                        scoAudioState = AudioManager.SCO_AUDIO_STATE_DISCONNECTED;
                        mScoAudioState = SCO_STATE_INACTIVE;
                        clearAllScoClients(0, false);
                        break;
                    case BluetoothHeadset.STATE_AUDIO_CONNECTING:
                        if (mScoAudioState != SCO_STATE_ACTIVE_INTERNAL &&
                            mScoAudioState != SCO_STATE_DEACTIVATE_REQ &&
                            mScoAudioState != SCO_STATE_DEACTIVATE_EXT_REQ) {
                            mScoAudioState = SCO_STATE_ACTIVE_EXTERNAL;
                        }
                        // fall through: CONNECTING is never broadcast
                    default:
                        // do not broadcast CONNECTING or invalid state
                        broadcast = false;
                        break;
                    }
                }
                if (broadcast) {
                    broadcastScoConnectionState(scoAudioState);
                    //FIXME: this is to maintain compatibility with deprecated intent
                    // AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED. Remove when appropriate.
                    Intent newIntent = new Intent(AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED);
                    newIntent.putExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, scoAudioState);
                    sendStickyBroadcastToAll(newIntent);
                }
            } else if (action.equals(Intent.ACTION_SCREEN_ON)) {
                if (mMonitorRotation) {
                    mOrientationListener.onOrientationChanged(0); //argument is ignored anyway
                    mOrientationListener.enable();
                }
                AudioSystem.setParameters("screen_state=on");
            } else if (action.equals(Intent.ACTION_SCREEN_OFF)) {
                if (mMonitorRotation) {
                    //reduce wakeups (save current) by only listening when display is on
                    mOrientationListener.disable();
                }
                AudioSystem.setParameters("screen_state=off");
            } else if (action.equals(Intent.ACTION_CONFIGURATION_CHANGED)) {
                handleConfigurationChanged(context);
            } else if (action.equals(Intent.ACTION_USER_SWITCHED)) {
                // attempt to stop music playback for background user
                sendMsg(mAudioHandler,
                        MSG_BROADCAST_AUDIO_BECOMING_NOISY,
                        SENDMSG_REPLACE,
                        0,
                        0,
                        null,
                        0);
                // the current audio focus owner is no longer valid
                mMediaFocusControl.discardAudioFocusOwner();

                // load volume settings for new user
                readAudioSettings(true /*userSwitch*/);
                // preserve STREAM_MUSIC volume from one user to the next.
                sendMsg(mAudioHandler,
                        MSG_SET_ALL_VOLUMES,
                        SENDMSG_QUEUE,
                        0,
                        0,
                        mStreamStates[AudioSystem.STREAM_MUSIC], 0);
            }
        }
    } // end class AudioServiceBroadcastReceiver
-
- //==========================================================================================
- // RemoteControlDisplay / RemoteControlClient / Remote info
- //==========================================================================================
    /** Delegates remote controller registration to MediaFocusControl. */
    public boolean registerRemoteController(IRemoteControlDisplay rcd, int w, int h,
            ComponentName listenerComp) {
        return mMediaFocusControl.registerRemoteController(rcd, w, h, listenerComp);
    }
-
    /** Delegates remote control display registration to MediaFocusControl. */
    public boolean registerRemoteControlDisplay(IRemoteControlDisplay rcd, int w, int h) {
        return mMediaFocusControl.registerRemoteControlDisplay(rcd, w, h);
    }
-
    /** Delegates remote control display unregistration to MediaFocusControl. */
    public void unregisterRemoteControlDisplay(IRemoteControlDisplay rcd) {
        mMediaFocusControl.unregisterRemoteControlDisplay(rcd);
    }
-
    /** Forwards the artwork bitmap size wanted by a display to MediaFocusControl. */
    public void remoteControlDisplayUsesBitmapSize(IRemoteControlDisplay rcd, int w, int h) {
        mMediaFocusControl.remoteControlDisplayUsesBitmapSize(rcd, w, h);
    }
-
    /** Forwards a display's playback position sync preference to MediaFocusControl. */
    public void remoteControlDisplayWantsPlaybackPositionSync(IRemoteControlDisplay rcd,
            boolean wantsSync) {
        mMediaFocusControl.remoteControlDisplayWantsPlaybackPositionSync(rcd, wantsSync);
    }
-
    /** Sets the remote stream volume; restricted to SystemUI or the system itself. */
    @Override
    public void setRemoteStreamVolume(int index) {
        enforceSelfOrSystemUI("set the remote stream volume");
        mMediaFocusControl.setRemoteStreamVolume(index);
    }
-
- //==========================================================================================
- // Audio Focus
- //==========================================================================================
    /**
     * Requests audio focus on behalf of a client.
     * When AUDIOFOCUS_FLAG_LOCK is set, the caller must either be the voice
     * communication client holding MODIFY_PHONE_STATE, or a registered audio
     * policy — otherwise the request fails before touching focus state.
     *
     * @return an AudioManager.AUDIOFOCUS_REQUEST_* result code
     */
    public int requestAudioFocus(AudioAttributes aa, int durationHint, IBinder cb,
            IAudioFocusDispatcher fd, String clientId, String callingPackageName, int flags,
            IAudioPolicyCallback pcb) {
        // permission checks
        if ((flags & AudioManager.AUDIOFOCUS_FLAG_LOCK) == AudioManager.AUDIOFOCUS_FLAG_LOCK) {
            if (mMediaFocusControl.IN_VOICE_COMM_FOCUS_ID.equals(clientId)) {
                if (PackageManager.PERMISSION_GRANTED != mContext.checkCallingOrSelfPermission(
                            android.Manifest.permission.MODIFY_PHONE_STATE)) {
                    Log.e(TAG, "Invalid permission to (un)lock audio focus", new Exception());
                    return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
                }
            } else {
                // only a registered audio policy can be used to lock focus
                synchronized (mAudioPolicies) {
                    if (!mAudioPolicies.containsKey(pcb.asBinder())) {
                        Log.e(TAG, "Invalid unregistered AudioPolicy to (un)lock audio focus");
                        return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
                    }
                }
            }
        }

        return mMediaFocusControl.requestAudioFocus(aa, durationHint, cb, fd,
                clientId, callingPackageName, flags);
    }
-
    /** Abandons audio focus for the given client; delegates to MediaFocusControl. */
    public int abandonAudioFocus(IAudioFocusDispatcher fd, String clientId, AudioAttributes aa) {
        return mMediaFocusControl.abandonAudioFocus(fd, clientId, aa);
    }
-
    /** Removes a focus client (e.g. on client death); delegates to MediaFocusControl. */
    public void unregisterAudioFocusClient(String clientId) {
        mMediaFocusControl.unregisterAudioFocusClient(clientId);
    }
-
    /** Returns the focus state reported by MediaFocusControl. */
    public int getCurrentAudioFocus() {
        return mMediaFocusControl.getCurrentAudioFocus();
    }
-
- //==========================================================================================
- // Device orientation
- //==========================================================================================
    /**
     * Handles device configuration changes that may map to a change in the
     * device orientation, and refreshes configuration-derived audio state:
     * safe media volume, the forced camera shutter sound, and the volume
     * controller layout direction.
     * Monitoring orientation and rotation is optional, and is defined by the definition and value
     * of the "ro.audio.monitorOrientation" and "ro.audio.monitorRotation" system properties.
     */
    private void handleConfigurationChanged(Context context) {
        try {
            // reading new orientation "safely" (i.e. under try catch) in case anything
            // goes wrong when obtaining resources and configuration
            Configuration config = context.getResources().getConfiguration();
            // TODO merge rotation and orientation
            if (mMonitorOrientation) {
                int newOrientation = config.orientation;
                if (newOrientation != mDeviceOrientation) {
                    mDeviceOrientation = newOrientation;
                    setOrientationForAudioSystem();
                }
            }
            // Re-evaluate safe media volume (mcc-dependent) on the handler thread.
            sendMsg(mAudioHandler,
                    MSG_CONFIGURE_SAFE_MEDIA_VOLUME,
                    SENDMSG_REPLACE,
                    0,
                    0,
                    null,
                    0);

            boolean cameraSoundForced = mContext.getResources().getBoolean(
                    com.android.internal.R.bool.config_camera_sound_forced);
            synchronized (mSettingsLock) {
                boolean cameraSoundForcedChanged = false;
                synchronized (mCameraSoundForced) {
                    if (cameraSoundForced != mCameraSoundForced) {
                        mCameraSoundForced = cameraSoundForced;
                        cameraSoundForcedChanged = true;
                    }
                }
                if (cameraSoundForcedChanged) {
                    if (!isPlatformTelevision()) {
                        VolumeStreamState s = mStreamStates[AudioSystem.STREAM_SYSTEM_ENFORCED];
                        if (cameraSoundForced) {
                            // Forced shutter sound: max volume, exempt from ringer mode.
                            s.setAllIndexesToMax();
                            mRingerModeAffectedStreams &=
                                    ~(1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
                        } else {
                            // Track SYSTEM stream volume and follow ringer mode again.
                            s.setAllIndexes(mStreamStates[AudioSystem.STREAM_SYSTEM]);
                            mRingerModeAffectedStreams |=
                                    (1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
                        }
                        // take new state into account for streams muted by ringer mode
                        setRingerModeInt(getRingerModeInternal(), false);
                    }

                    sendMsg(mAudioHandler,
                            MSG_SET_FORCE_USE,
                            SENDMSG_QUEUE,
                            AudioSystem.FOR_SYSTEM,
                            cameraSoundForced ?
                                    AudioSystem.FORCE_SYSTEM_ENFORCED : AudioSystem.FORCE_NONE,
                            null,
                            0);

                    sendMsg(mAudioHandler,
                            MSG_SET_ALL_VOLUMES,
                            SENDMSG_QUEUE,
                            0,
                            0,
                            mStreamStates[AudioSystem.STREAM_SYSTEM_ENFORCED], 0);
                }
            }
            mVolumeController.setLayoutDirection(config.getLayoutDirection());
        } catch (Exception e) {
            Log.e(TAG, "Error handling configuration change: ", e);
        }
    }
-
- private void setOrientationForAudioSystem() {
- switch (mDeviceOrientation) {
- case Configuration.ORIENTATION_LANDSCAPE:
- //Log.i(TAG, "orientation is landscape");
- AudioSystem.setParameters("orientation=landscape");
- break;
- case Configuration.ORIENTATION_PORTRAIT:
- //Log.i(TAG, "orientation is portrait");
- AudioSystem.setParameters("orientation=portrait");
- break;
- case Configuration.ORIENTATION_SQUARE:
- //Log.i(TAG, "orientation is square");
- AudioSystem.setParameters("orientation=square");
- break;
- case Configuration.ORIENTATION_UNDEFINED:
- //Log.i(TAG, "orientation is undefined");
- AudioSystem.setParameters("orientation=undefined");
- break;
- default:
- Log.e(TAG, "Unknown orientation");
- }
- }
-
- private void setRotationForAudioSystem() {
- switch (mDeviceRotation) {
- case Surface.ROTATION_0:
- AudioSystem.setParameters("rotation=0");
- break;
- case Surface.ROTATION_90:
- AudioSystem.setParameters("rotation=90");
- break;
- case Surface.ROTATION_180:
- AudioSystem.setParameters("rotation=180");
- break;
- case Surface.ROTATION_270:
- AudioSystem.setParameters("rotation=270");
- break;
- default:
- Log.e(TAG, "Unknown device rotation");
- }
- }
-
-
    // Handles request to override default use of A2DP for media.
    // Cancels any pending MSG_SET_FORCE_BT_A2DP_USE and applies the force-use
    // synchronously: FORCE_NONE re-allows A2DP, FORCE_NO_BT_A2DP disables it.
    public void setBluetoothA2dpOnInt(boolean on) {
        synchronized (mBluetoothA2dpEnabledLock) {
            mBluetoothA2dpEnabled = on;
            mAudioHandler.removeMessages(MSG_SET_FORCE_BT_A2DP_USE);
            AudioSystem.setForceUse(AudioSystem.FOR_MEDIA,
                    mBluetoothA2dpEnabled ? AudioSystem.FORCE_NONE : AudioSystem.FORCE_NO_BT_A2DP);
        }
    }
-
    /** Registers the system ringtone player; requires REMOTE_AUDIO_PLAYBACK. */
    @Override
    public void setRingtonePlayer(IRingtonePlayer player) {
        mContext.enforceCallingOrSelfPermission(REMOTE_AUDIO_PLAYBACK, null);
        mRingtonePlayer = player;
    }
-
    /** Returns the registered ringtone player, or null if none was set. */
    @Override
    public IRingtonePlayer getRingtonePlayer() {
        return mRingtonePlayer;
    }
-
    /**
     * Registers an observer for audio route changes and returns a snapshot of
     * the current routes, atomically with respect to route updates.
     */
    @Override
    public AudioRoutesInfo startWatchingRoutes(IAudioRoutesObserver observer) {
        synchronized (mCurAudioRoutes) {
            AudioRoutesInfo routes = new AudioRoutesInfo(mCurAudioRoutes);
            mRoutesObservers.register(observer);
            return routes;
        }
    }
-
-
- //==========================================================================================
- // Safe media volume management.
- // MUSIC stream volume level is limited when headphones are connected according to safety
- // regulation. When the user attempts to raise the volume above the limit, a warning is
- // displayed and the user has to acknowlegde before the volume is actually changed.
- // The volume index corresponding to the limit is stored in config_safe_media_volume_index
- // property. Platforms with a different limit must set this property accordingly in their
- // overlay.
- //==========================================================================================
-
    // mSafeMediaVolumeState indicates whether the media volume is limited over headphones.
    // It is SAFE_MEDIA_VOLUME_NOT_CONFIGURED at boot time until a network service is connected
    // or the configure time is elapsed. It is then set to SAFE_MEDIA_VOLUME_ACTIVE or
    // SAFE_MEDIA_VOLUME_DISABLED according to country option. If not SAFE_MEDIA_VOLUME_DISABLED, it
    // can be set to SAFE_MEDIA_VOLUME_INACTIVE by calling AudioService.disableSafeMediaVolume()
    // (when user opts out).
    private static final int SAFE_MEDIA_VOLUME_NOT_CONFIGURED = 0;
    private static final int SAFE_MEDIA_VOLUME_DISABLED = 1;
    private static final int SAFE_MEDIA_VOLUME_INACTIVE = 2;  // confirmed
    private static final int SAFE_MEDIA_VOLUME_ACTIVE = 3;  // unconfirmed
    // NOTE(review): this boxed Integer is also used as a monitor and reassigned
    // under that monitor elsewhere — confirm whether a dedicated lock object is needed.
    private Integer mSafeMediaVolumeState;

    // mobile country code, used to select a country-specific safe volume policy
    private int mMcc = 0;
    // mSafeMediaVolumeIndex is the cached value of config_safe_media_volume_index property
    private int mSafeMediaVolumeIndex;
    // mSafeMediaVolumeDevices lists the devices for which safe media volume is enforced,
    private final int mSafeMediaVolumeDevices = AudioSystem.DEVICE_OUT_WIRED_HEADSET |
                                                AudioSystem.DEVICE_OUT_WIRED_HEADPHONE;
    // mMusicActiveMs is the cumulative time of music activity since safe volume was disabled.
    // When this time reaches UNSAFE_VOLUME_MUSIC_ACTIVE_MS_MAX, the safe media volume is re-enabled
    // automatically. mMusicActiveMs is rounded to a multiple of MUSIC_ACTIVE_POLL_PERIOD_MS.
    private int mMusicActiveMs;
    private static final int UNSAFE_VOLUME_MUSIC_ACTIVE_MS_MAX = (20 * 3600 * 1000); // 20 hours
    private static final int MUSIC_ACTIVE_POLL_PERIOD_MS = 60000;  // 1 minute polling interval
    private static final int SAFE_VOLUME_CONFIGURE_TIMEOUT_MS = 30000;  // 30s after boot completed
-
    /**
     * Activates or deactivates the safe media volume limit, when configured
     * (no-op while NOT_CONFIGURED or DISABLED). Enabling enforces the limit
     * immediately; disabling starts the music-activity polling that will
     * eventually re-enable it.
     */
    private void setSafeMediaVolumeEnabled(boolean on) {
        // NOTE(review): synchronizing on mSafeMediaVolumeState while also
        // reassigning the field means later threads may lock a different
        // Integer instance — confirm a dedicated lock object isn't needed.
        synchronized (mSafeMediaVolumeState) {
            if ((mSafeMediaVolumeState != SAFE_MEDIA_VOLUME_NOT_CONFIGURED) &&
                    (mSafeMediaVolumeState != SAFE_MEDIA_VOLUME_DISABLED)) {
                if (on && (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_INACTIVE)) {
                    mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_ACTIVE;
                    enforceSafeMediaVolume();
                } else if (!on && (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_ACTIVE)) {
                    mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_INACTIVE;
                    mMusicActiveMs = 1;  // nonzero = confirmed
                    saveMusicActiveMs();
                    sendMsg(mAudioHandler,
                            MSG_CHECK_MUSIC_ACTIVE,
                            SENDMSG_REPLACE,
                            0,
                            0,
                            null,
                            MUSIC_ACTIVE_POLL_PERIOD_MS);
                }
            }
        }
    }
-
    /**
     * Caps the MUSIC stream index at mSafeMediaVolumeIndex for every device in
     * mSafeMediaVolumeDevices whose current index exceeds the limit, queueing
     * a message to apply each new device volume.
     */
    private void enforceSafeMediaVolume() {
        VolumeStreamState streamState = mStreamStates[AudioSystem.STREAM_MUSIC];
        int devices = mSafeMediaVolumeDevices;
        int i = 0;

        // Walk the device bit mask one bit at a time until all bits are cleared.
        while (devices != 0) {
            int device = 1 << i++;
            if ((device & devices) == 0) {
                continue;
            }
            int index = streamState.getIndex(device);
            if (index > mSafeMediaVolumeIndex) {
                streamState.setIndex(mSafeMediaVolumeIndex, device);
                sendMsg(mAudioHandler,
                        MSG_SET_DEVICE_VOLUME,
                        SENDMSG_QUEUE,
                        device,
                        0,
                        streamState,
                        0);
            }
            devices &= ~device;
        }
    }
-
- private boolean checkSafeMediaVolume(int streamType, int index, int device) {
- synchronized (mSafeMediaVolumeState) {
- if ((mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_ACTIVE) &&
- (mStreamVolumeAlias[streamType] == AudioSystem.STREAM_MUSIC) &&
- ((device & mSafeMediaVolumeDevices) != 0) &&
- (index > mSafeMediaVolumeIndex)) {
- return false;
- }
- return true;
- }
- }
-
    /**
     * Called when the user acknowledges the safe volume warning: lifts the
     * limit and replays the volume command that was deferred pending the
     * confirmation, if any. Restricted to SystemUI or the system itself.
     */
    @Override
    public void disableSafeMediaVolume() {
        enforceSelfOrSystemUI("disable the safe media volume");
        synchronized (mSafeMediaVolumeState) {
            setSafeMediaVolumeEnabled(false);
            if (mPendingVolumeCommand != null) {
                onSetStreamVolume(mPendingVolumeCommand.mStreamType,
                                  mPendingVolumeCommand.mIndex,
                                  mPendingVolumeCommand.mFlags,
                                  mPendingVolumeCommand.mDevice);
                mPendingVolumeCommand = null;
            }
        }
    }
-
- //==========================================================================================
- // Hdmi Cec system audio mode.
- // If Hdmi Cec's system audio mode is on, audio service should notify volume change
- // to HdmiControlService so that audio recevier can handle volume change.
- //==========================================================================================
-
    /**
     * Receives the result of HdmiPlaybackClient.queryDisplayStatus(): any
     * known power status means a CEC-capable sink is attached. Television
     * devices without a CEC sink fall back to software (fixed) HDMI volume.
     */
    private class MyDisplayStatusCallback implements HdmiPlaybackClient.DisplayStatusCallback {
        public void onComplete(int status) {
            if (mHdmiManager != null) {
                synchronized (mHdmiManager) {
                    mHdmiCecSink = (status != HdmiControlManager.POWER_STATUS_UNKNOWN);
                    // Television devices without CEC service apply software volume on HDMI output
                    if (isPlatformTelevision() && !mHdmiCecSink) {
                        mFixedVolumeDevices &= ~AudioSystem.DEVICE_OUT_HDMI;
                    }
                    checkAllFixedVolumeDevices();
                }
            }
        }
    };
-
    // If HDMI-CEC system audio is supported
    private boolean mHdmiSystemAudioSupported = false;
    // Set only when device is tv.
    private HdmiTvClient mHdmiTvClient;
    // cached HdmiControlManager interface
    // (NOTE(review): a stray comment about PackageManager.FEATURE_LEANBACK was
    // attached here with no matching field — presumably leftover; confirm.)
    private HdmiControlManager mHdmiManager;
    // Set only when device is a set-top box.
    private HdmiPlaybackClient mHdmiPlaybackClient;
    // true if we are a set-top box, an HDMI sink is connected and it supports CEC.
    private boolean mHdmiCecSink;

    private MyDisplayStatusCallback mHdmiDisplayStatusCallback = new MyDisplayStatusCallback();
-
    /**
     * Enables/disables HDMI system audio mode (TV with CEC only) by forcing
     * the FOR_HDMI_SYSTEM_AUDIO routing accordingly.
     *
     * @return the devices currently selected for STREAM_MUSIC, or DEVICE_NONE
     *         when the device is not an HDMI-CEC enabled TV
     */
    @Override
    public int setHdmiSystemAudioSupported(boolean on) {
        int device = AudioSystem.DEVICE_NONE;
        if (mHdmiManager != null) {
            synchronized (mHdmiManager) {
                if (mHdmiTvClient == null) {
                    Log.w(TAG, "Only Hdmi-Cec enabled TV device supports system audio mode.");
                    return device;
                }

                synchronized (mHdmiTvClient) {
                    if (mHdmiSystemAudioSupported != on) {
                        mHdmiSystemAudioSupported = on;
                        AudioSystem.setForceUse(AudioSystem.FOR_HDMI_SYSTEM_AUDIO,
                                on ? AudioSystem.FORCE_HDMI_SYSTEM_AUDIO_ENFORCED :
                                     AudioSystem.FORCE_NONE);
                    }
                    device = AudioSystem.getDevicesForStream(AudioSystem.STREAM_MUSIC);
                }
            }
        }
        return device;
    }
-
    /** Returns whether HDMI system audio mode is currently enabled. */
    @Override
    public boolean isHdmiSystemAudioSupported() {
        return mHdmiSystemAudioSupported;
    }
-
- //==========================================================================================
- // Accessibility: taking touch exploration into account for selecting the default
- // stream override timeout when adjusting volume
- //==========================================================================================
    /**
     * Tracks the accessibility touch-exploration state and adjusts the delay
     * used when overriding the default stream type for volume adjustments:
     * a shorter delay is used while touch exploration is enabled.
     */
    private static class StreamOverride
            implements AccessibilityManager.TouchExplorationStateChangeListener {

        // AudioService.getActiveStreamType() will return:
        // - STREAM_NOTIFICATION on tablets during this period after a notification stopped
        // - STREAM_MUSIC on phones during this period after music or talkback/voice search prompt
        //   stopped
        private static final int DEFAULT_STREAM_TYPE_OVERRIDE_DELAY_MS = 5000;
        private static final int TOUCH_EXPLORE_STREAM_TYPE_OVERRIDE_DELAY_MS = 1000;

        // current delay, read by getActiveStreamType() callers
        static int sDelayMs;

        // Seeds the delay from the current touch-exploration state and
        // registers for future state changes.
        static void init(Context ctxt) {
            AccessibilityManager accessibilityManager =
                    (AccessibilityManager) ctxt.getSystemService(Context.ACCESSIBILITY_SERVICE);
            updateDefaultStreamOverrideDelay(
                    accessibilityManager.isTouchExplorationEnabled());
            accessibilityManager.addTouchExplorationStateChangeListener(
                    new StreamOverride());
        }

        @Override
        public void onTouchExplorationStateChanged(boolean enabled) {
            updateDefaultStreamOverrideDelay(enabled);
        }

        private static void updateDefaultStreamOverrideDelay(boolean touchExploreEnabled) {
            if (touchExploreEnabled) {
                sDelayMs = TOUCH_EXPLORE_STREAM_TYPE_OVERRIDE_DELAY_MS;
            } else {
                sDelayMs = DEFAULT_STREAM_TYPE_OVERRIDE_DELAY_MS;
            }
            if (DEBUG_VOL) Log.d(TAG, "Touch exploration enabled=" + touchExploreEnabled
                    + " stream override delay is now " + sDelayMs + " ms");
        }
    }
-
    //==========================================================================================
    // Camera shutter sound policy.
    // config_camera_sound_forced configuration option in config.xml defines if the camera shutter
    // sound is forced (sound even if the device is in silent mode) or not. This option is false by
    // default and can be overridden by country specific overlay in values-mccXXX/config.xml.
    //==========================================================================================

    // cached value of com.android.internal.R.bool.config_camera_sound_forced
    private Boolean mCameraSoundForced;

    // called by android.hardware.Camera to populate CameraInfo.canDisableShutterSound
    // NOTE(review): synchronizes on a boxed Boolean that handleConfigurationChanged()
    // reassigns under the same monitor — confirm a dedicated lock isn't needed.
    public boolean isCameraSoundForced() {
        synchronized (mCameraSoundForced) {
            return mCameraSoundForced;
        }
    }
-
    // Human-readable ringer mode names for dumpsys output; presumably indexed
    // by the AudioManager.RINGER_MODE_* constant values — TODO confirm.
    private static final String[] RINGER_MODE_NAMES = new String[] {
            "SILENT",
            "VIBRATE",
            "NORMAL"
    };
-
    /** Writes the ringer-mode related state (modes, affected/muted streams,
     *  delegate) to the dumpsys output. */
    private void dumpRingerMode(PrintWriter pw) {
        pw.println("\nRinger mode: ");
        pw.println("- mode (internal) = " + RINGER_MODE_NAMES[mRingerMode]);
        pw.println("- mode (external) = " + RINGER_MODE_NAMES[mRingerModeExternal]);
        pw.print("- ringer mode affected streams = 0x");
        pw.println(Integer.toHexString(mRingerModeAffectedStreams));
        pw.print("- ringer mode muted streams = 0x");
        pw.println(Integer.toHexString(mRingerModeMutedStreams));
        pw.print("- delegate = "); pw.println(mRingerModeDelegate);
    }
-
    /**
     * dumpsys entry point: prints focus stack, stream states, ringer mode,
     * audio routes, safe-volume state and registered audio policies.
     * Requires the DUMP permission.
     */
    @Override
    protected void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.DUMP, TAG);

        mMediaFocusControl.dump(pw);
        dumpStreamStates(pw);
        dumpRingerMode(pw);
        pw.println("\nAudio routes:");
        pw.print("  mMainType=0x"); pw.println(Integer.toHexString(mCurAudioRoutes.mMainType));
        pw.print("  mBluetoothName="); pw.println(mCurAudioRoutes.mBluetoothName);

        pw.println("\nOther state:");
        pw.print("  mVolumeController="); pw.println(mVolumeController);
        pw.print("  mSafeMediaVolumeState=");
        pw.println(safeMediaVolumeStateToString(mSafeMediaVolumeState));
        pw.print("  mSafeMediaVolumeIndex="); pw.println(mSafeMediaVolumeIndex);
        pw.print("  mPendingVolumeCommand="); pw.println(mPendingVolumeCommand);
        pw.print("  mMusicActiveMs="); pw.println(mMusicActiveMs);
        pw.print("  mMcc="); pw.println(mMcc);
        pw.print("  mHasVibrator="); pw.println(mHasVibrator);

        dumpAudioPolicies(pw);
    }
-
- private static String safeMediaVolumeStateToString(Integer state) {
- switch(state) {
- case SAFE_MEDIA_VOLUME_NOT_CONFIGURED: return "SAFE_MEDIA_VOLUME_NOT_CONFIGURED";
- case SAFE_MEDIA_VOLUME_DISABLED: return "SAFE_MEDIA_VOLUME_DISABLED";
- case SAFE_MEDIA_VOLUME_INACTIVE: return "SAFE_MEDIA_VOLUME_INACTIVE";
- case SAFE_MEDIA_VOLUME_ACTIVE: return "SAFE_MEDIA_VOLUME_ACTIVE";
- }
- return null;
- }
-
    // Inform AudioFlinger of our device's low RAM attribute.
    // Note: the warning below is only logged when the call FAILED (non-zero status).
    private static void readAndSetLowRamDevice()
    {
        int status = AudioSystem.setLowRamDevice(ActivityManager.isLowRamDeviceStatic());
        if (status != 0) {
            Log.w(TAG, "AudioFlinger informed of device's low RAM attribute; status " + status);
        }
    }
-
    /** Throws SecurityException unless the caller holds STATUS_BAR_SERVICE
     *  (i.e. is SystemUI) or is the system itself. */
    private void enforceSelfOrSystemUI(String action) {
        mContext.enforceCallingOrSelfPermission(android.Manifest.permission.STATUS_BAR_SERVICE,
                "Only SystemUI can " + action);
    }
-
    /**
     * Installs (or clears, when null) the remote volume UI controller.
     * Dismisses the previous controller and watches the new one's binder so
     * the registration is dropped if the controller process dies.
     * Restricted to SystemUI or the system itself.
     */
    @Override
    public void setVolumeController(final IVolumeController controller) {
        enforceSelfOrSystemUI("set the volume controller");

        // return early if things are not actually changing
        if (mVolumeController.isSameBinder(controller)) {
            return;
        }

        // dismiss the old volume controller
        mVolumeController.postDismiss();
        if (controller != null) {
            // we are about to register a new controller, listen for its death
            try {
                controller.asBinder().linkToDeath(new DeathRecipient() {
                    @Override
                    public void binderDied() {
                        // Only unregister if this controller is still the current one.
                        if (mVolumeController.isSameBinder(controller)) {
                            Log.w(TAG, "Current remote volume controller died, unregistering");
                            setVolumeController(null);
                        }
                    }
                }, 0);
            } catch (RemoteException e) {
                // noop
            }
        }
        mVolumeController.setController(controller);
        if (DEBUG_VOL) Log.d(TAG, "Volume controller: " + mVolumeController);
    }
-
    /**
     * Records the visibility of the current volume controller UI; ignored if
     * the caller is not the registered controller. Restricted to SystemUI or
     * the system itself.
     */
    @Override
    public void notifyVolumeControllerVisible(final IVolumeController controller, boolean visible) {
        enforceSelfOrSystemUI("notify about volume controller visibility");

        // return early if the controller is not current
        if (!mVolumeController.isSameBinder(controller)) {
            return;
        }

        mVolumeController.setVisible(visible);
        if (DEBUG_VOL) Log.d(TAG, "Volume controller visible: " + visible);
    }
-
    /**
     * Wrapper around the registered IVolumeController binder: tracks its
     * visibility, implements the long-press suppression used for the RING
     * stream, and forwards volume events while swallowing RemoteExceptions
     * (each post* method is a best-effort one-way notification).
     */
    public static class VolumeController {
        private static final String TAG = "VolumeController";

        private IVolumeController mController;
        // whether the controller UI is currently shown
        private boolean mVisible;
        // uptime at which a pending long-press would trigger; 0 when none
        private long mNextLongPress;
        // long-press duration loaded from Settings.Secure.LONG_PRESS_TIMEOUT
        private int mLongPressTimeout;

        public void setController(IVolumeController controller) {
            mController = controller;
            mVisible = false;
        }

        public void loadSettings(ContentResolver cr) {
            mLongPressTimeout = Settings.Secure.getIntForUser(cr,
                    Settings.Secure.LONG_PRESS_TIMEOUT, 500, UserHandle.USER_CURRENT);
        }

        /**
         * Returns true when a RING-stream adjustment should be suppressed:
         * either the UI is about to become visible (arming a long-press
         * deadline) or a long-press is still pending.
         */
        public boolean suppressAdjustment(int resolvedStream, int flags) {
            boolean suppress = false;
            if (resolvedStream == AudioSystem.STREAM_RING && mController != null) {
                final long now = SystemClock.uptimeMillis();
                if ((flags & AudioManager.FLAG_SHOW_UI) != 0 && !mVisible) {
                    // ui will become visible
                    if (mNextLongPress < now) {
                        mNextLongPress = now + mLongPressTimeout;
                    }
                    suppress = true;
                } else if (mNextLongPress > 0) {  // in a long-press
                    if (now > mNextLongPress) {
                        // long press triggered, no more suppression
                        mNextLongPress = 0;
                    } else {
                        // keep suppressing until the long press triggers
                        suppress = true;
                    }
                }
            }
            return suppress;
        }

        public void setVisible(boolean visible) {
            mVisible = visible;
        }

        /** True when the given controller wraps the same binder (both may be null). */
        public boolean isSameBinder(IVolumeController controller) {
            return Objects.equals(asBinder(), binder(controller));
        }

        public IBinder asBinder() {
            return binder(mController);
        }

        private static IBinder binder(IVolumeController controller) {
            return controller == null ? null : controller.asBinder();
        }

        @Override
        public String toString() {
            return "VolumeController(" + asBinder() + ",mVisible=" + mVisible + ")";
        }

        public void postDisplaySafeVolumeWarning(int flags) {
            if (mController == null)
                return;
            try {
                mController.displaySafeVolumeWarning(flags);
            } catch (RemoteException e) {
                Log.w(TAG, "Error calling displaySafeVolumeWarning", e);
            }
        }

        public void postVolumeChanged(int streamType, int flags) {
            if (mController == null)
                return;
            try {
                mController.volumeChanged(streamType, flags);
            } catch (RemoteException e) {
                Log.w(TAG, "Error calling volumeChanged", e);
            }
        }

        public void postMasterVolumeChanged(int flags) {
            if (mController == null)
                return;
            try {
                mController.masterVolumeChanged(flags);
            } catch (RemoteException e) {
                Log.w(TAG, "Error calling masterVolumeChanged", e);
            }
        }

        public void postMasterMuteChanged(int flags) {
            if (mController == null)
                return;
            try {
                mController.masterMuteChanged(flags);
            } catch (RemoteException e) {
                Log.w(TAG, "Error calling masterMuteChanged", e);
            }
        }

        public void setLayoutDirection(int layoutDirection) {
            if (mController == null)
                return;
            try {
                mController.setLayoutDirection(layoutDirection);
            } catch (RemoteException e) {
                Log.w(TAG, "Error calling setLayoutDirection", e);
            }
        }

        public void postDismiss() {
            if (mController == null)
                return;
            try {
                mController.dismiss();
            } catch (RemoteException e) {
                Log.w(TAG, "Error calling dismiss", e);
            }
        }
    }
-
- /**
- * Interface for system components to get some extra functionality through
- * LocalServices.
- */
- final class AudioServiceInternal extends AudioManagerInternal {
- @Override
- public void setRingerModeDelegate(RingerModeDelegate delegate) {
- mRingerModeDelegate = delegate;
- if (mRingerModeDelegate != null) {
- setRingerModeInternal(getRingerModeInternal(), TAG + ".setRingerModeDelegate");
- }
- }
-
- @Override
- public void adjustSuggestedStreamVolumeForUid(int streamType, int direction, int flags,
- String callingPackage, int uid) {
- // direction and stream type swap here because the public
- // adjustSuggested has a different order than the other methods.
- adjustSuggestedStreamVolume(direction, streamType, flags, callingPackage, uid);
- }
-
- @Override
- public void adjustStreamVolumeForUid(int streamType, int direction, int flags,
- String callingPackage, int uid) {
- adjustStreamVolume(streamType, direction, flags, callingPackage, uid);
- }
-
- @Override
- public void setStreamVolumeForUid(int streamType, int direction, int flags,
- String callingPackage, int uid) {
- setStreamVolume(streamType, direction, flags, callingPackage, uid);
- }
-
- @Override
- public void adjustMasterVolumeForUid(int steps, int flags, String callingPackage,
- int uid) {
- adjustMasterVolume(steps, flags, callingPackage, uid);
- }
-
- @Override
- public int getRingerModeInternal() {
- return AudioService.this.getRingerModeInternal();
- }
-
- @Override
- public void setRingerModeInternal(int ringerMode, String caller) {
- AudioService.this.setRingerModeInternal(ringerMode, caller);
- }
-
- @Override
- public void setMasterMuteForUid(boolean state, int flags, String callingPackage, IBinder cb,
- int uid) {
- setMasterMuteInternal(state, flags, callingPackage, cb, uid);
- }
- }
-
- //==========================================================================================
- // Audio policy management
- //==========================================================================================
- public String registerAudioPolicy(AudioPolicyConfig policyConfig, IAudioPolicyCallback pcb,
- boolean hasFocusListener) {
- if (DEBUG_AP) Log.d(TAG, "registerAudioPolicy for " + pcb.asBinder()
- + " with config:" + policyConfig);
- String regId = null;
- // error handling
- boolean hasPermissionForPolicy =
- (PackageManager.PERMISSION_GRANTED == mContext.checkCallingPermission(
- android.Manifest.permission.MODIFY_AUDIO_ROUTING));
- if (!hasPermissionForPolicy) {
- Slog.w(TAG, "Can't register audio policy for pid " + Binder.getCallingPid() + " / uid "
- + Binder.getCallingUid() + ", need MODIFY_AUDIO_ROUTING");
- return null;
- }
-
- synchronized (mAudioPolicies) {
- try {
- if (mAudioPolicies.containsKey(pcb.asBinder())) {
- Slog.e(TAG, "Cannot re-register policy");
- return null;
- }
- AudioPolicyProxy app = new AudioPolicyProxy(policyConfig, pcb, hasFocusListener);
- pcb.asBinder().linkToDeath(app, 0/*flags*/);
- regId = app.getRegistrationId();
- mAudioPolicies.put(pcb.asBinder(), app);
- } catch (RemoteException e) {
- // audio policy owner has already died!
- Slog.w(TAG, "Audio policy registration failed, could not link to " + pcb +
- " binder death", e);
- return null;
- }
- }
- return regId;
- }
-
- public void unregisterAudioPolicyAsync(IAudioPolicyCallback pcb) {
- if (DEBUG_AP) Log.d(TAG, "unregisterAudioPolicyAsync for " + pcb.asBinder());
- synchronized (mAudioPolicies) {
- AudioPolicyProxy app = mAudioPolicies.remove(pcb.asBinder());
- if (app == null) {
- Slog.w(TAG, "Trying to unregister unknown audio policy for pid "
- + Binder.getCallingPid() + " / uid " + Binder.getCallingUid());
- return;
- } else {
- pcb.asBinder().unlinkToDeath(app, 0/*flags*/);
- }
- app.release();
- }
- // TODO implement clearing mix attribute matching info in native audio policy
- }
-
- public int setFocusPropertiesForPolicy(int duckingBehavior, IAudioPolicyCallback pcb) {
- if (DEBUG_AP) Log.d(TAG, "setFocusPropertiesForPolicy() duck behavior=" + duckingBehavior
- + " policy " + pcb.asBinder());
- // error handling
- boolean hasPermissionForPolicy =
- (PackageManager.PERMISSION_GRANTED == mContext.checkCallingPermission(
- android.Manifest.permission.MODIFY_AUDIO_ROUTING));
- if (!hasPermissionForPolicy) {
- Slog.w(TAG, "Cannot change audio policy ducking handling for pid " +
- + Binder.getCallingPid() + " / uid "
- + Binder.getCallingUid() + ", need MODIFY_AUDIO_ROUTING");
- return AudioManager.ERROR;
- }
-
- synchronized (mAudioPolicies) {
- if (!mAudioPolicies.containsKey(pcb.asBinder())) {
- Slog.e(TAG, "Cannot change audio policy focus properties, unregistered policy");
- return AudioManager.ERROR;
- }
- final AudioPolicyProxy app = mAudioPolicies.get(pcb.asBinder());
- if (duckingBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY) {
- // is there already one policy managing ducking?
- for(AudioPolicyProxy policy : mAudioPolicies.values()) {
- if (policy.mFocusDuckBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY) {
- Slog.e(TAG, "Cannot change audio policy ducking behavior, already handled");
- return AudioManager.ERROR;
- }
- }
- }
- app.mFocusDuckBehavior = duckingBehavior;
- mMediaFocusControl.setDuckingInExtPolicyAvailable(
- duckingBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY);
- }
- return AudioManager.SUCCESS;
- }
-
- private void dumpAudioPolicies(PrintWriter pw) {
- pw.println("\nAudio policies:");
- synchronized (mAudioPolicies) {
- for(AudioPolicyProxy policy : mAudioPolicies.values()) {
- pw.println(policy.toLogFriendlyString());
- }
- }
- }
-
- //======================
- // Audio policy proxy
- //======================
- /**
- * This internal class inherits from AudioPolicyConfig, each instance contains all the
- * mixes of an AudioPolicy and their configurations.
- */
- public class AudioPolicyProxy extends AudioPolicyConfig implements IBinder.DeathRecipient {
- private static final String TAG = "AudioPolicyProxy";
- AudioPolicyConfig mConfig;
- IAudioPolicyCallback mPolicyToken;
- boolean mHasFocusListener;
- /**
- * Audio focus ducking behavior for an audio policy.
- * This variable reflects the value that was successfully set in
- * {@link AudioService#setFocusPropertiesForPolicy(int, IAudioPolicyCallback)}. This
- * implies that a value of FOCUS_POLICY_DUCKING_IN_POLICY means the corresponding policy
- * is handling ducking for audio focus.
- */
- int mFocusDuckBehavior = AudioPolicy.FOCUS_POLICY_DUCKING_DEFAULT;
-
- AudioPolicyProxy(AudioPolicyConfig config, IAudioPolicyCallback token,
- boolean hasFocusListener) {
- super(config);
- setRegistration(new String(config.hashCode() + ":ap:" + mAudioPolicyCounter++));
- mPolicyToken = token;
- mHasFocusListener = hasFocusListener;
- if (mHasFocusListener) {
- mMediaFocusControl.addFocusFollower(mPolicyToken);
- }
- connectMixes();
- }
-
- public void binderDied() {
- synchronized (mAudioPolicies) {
- Log.i(TAG, "audio policy " + mPolicyToken + " died");
- release();
- mAudioPolicies.remove(mPolicyToken.asBinder());
- }
- }
-
- String getRegistrationId() {
- return getRegistration();
- }
-
- void release() {
- if (mFocusDuckBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY) {
- mMediaFocusControl.setDuckingInExtPolicyAvailable(false);
- }
- if (mHasFocusListener) {
- mMediaFocusControl.removeFocusFollower(mPolicyToken);
- }
- AudioSystem.registerPolicyMixes(mMixes, false);
- }
-
- void connectMixes() {
- AudioSystem.registerPolicyMixes(mMixes, true);
- }
- };
-
- private HashMap<IBinder, AudioPolicyProxy> mAudioPolicies =
- new HashMap<IBinder, AudioPolicyProxy>();
- private int mAudioPolicyCounter = 0; // always accessed synchronized on mAudioPolicies
-}
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 46ab7e0..373f3fd 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -16,7 +16,11 @@
package android.media;
+import android.content.Context;
+import android.content.pm.PackageManager;
import android.media.audiopolicy.AudioMix;
+import android.util.Log;
+
import java.util.ArrayList;
/* IF YOU CHANGE ANY OF THE CONSTANTS IN THIS FILE, DO NOT FORGET
@@ -29,6 +33,7 @@ import java.util.ArrayList;
*/
public class AudioSystem
{
+ private static final String TAG = "AudioSystem";
/* These values must be kept in sync with system/audio.h */
/*
* If these are modified, please also update Settings.System.VOLUME_SETTINGS
@@ -65,6 +70,19 @@ public class AudioSystem
private static final int NUM_STREAM_TYPES = 10;
public static final int getNumStreamTypes() { return NUM_STREAM_TYPES; }
+ public static final String[] STREAM_NAMES = new String[] {
+ "STREAM_VOICE_CALL",
+ "STREAM_SYSTEM",
+ "STREAM_RING",
+ "STREAM_MUSIC",
+ "STREAM_ALARM",
+ "STREAM_NOTIFICATION",
+ "STREAM_BLUETOOTH_SCO",
+ "STREAM_SYSTEM_ENFORCED",
+ "STREAM_DTMF",
+ "STREAM_TTS"
+ };
+
/*
* Sets the microphone mute on or off.
*
@@ -107,7 +125,7 @@ public class AudioSystem
/** @deprecated */
@Deprecated public static final int ROUTE_ALL = 0xFFFFFFFF;
- // Keep in sync with system/core/include/system/audio.h
+ // Keep in sync with system/media/audio/include/system/audio.h
public static final int AUDIO_SESSION_ALLOCATE = 0;
/*
@@ -208,6 +226,48 @@ public class AudioSystem
}
}
+ /**
+ * Handles events for the audio policy manager about dynamic audio policies
+ * @see android.media.audiopolicy.AudioPolicy
+ */
+ public interface DynamicPolicyCallback
+ {
+ void onDynamicPolicyMixStateUpdate(String regId, int state);
+ }
+
+ //keep in sync with include/media/AudioPolicy.h
+ private final static int DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE = 0;
+
+ private static DynamicPolicyCallback sDynPolicyCallback;
+
+ public static void setDynamicPolicyCallback(DynamicPolicyCallback cb)
+ {
+ synchronized (AudioSystem.class) {
+ sDynPolicyCallback = cb;
+ native_register_dynamic_policy_callback();
+ }
+ }
+
+ private static void dynamicPolicyCallbackFromNative(int event, String regId, int val)
+ {
+ DynamicPolicyCallback cb = null;
+ synchronized (AudioSystem.class) {
+ if (sDynPolicyCallback != null) {
+ cb = sDynPolicyCallback;
+ }
+ }
+ if (cb != null) {
+ switch(event) {
+ case DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE:
+ cb.onDynamicPolicyMixStateUpdate(regId, val);
+ break;
+ default:
+ Log.e(TAG, "dynamicPolicyCallbackFromNative: unknown event " + event);
+ }
+ }
+ }
+
+
/*
* Error codes used by public APIs (AudioTrack, AudioRecord, AudioManager ...)
* Must be kept in sync with frameworks/base/core/jni/android_media_AudioErrors.h
@@ -534,7 +594,8 @@ public class AudioSystem
public static final int SYNC_EVENT_NONE = 0;
public static final int SYNC_EVENT_PRESENTATION_COMPLETE = 1;
- public static native int setDeviceConnectionState(int device, int state, String device_address);
+ public static native int setDeviceConnectionState(int device, int state,
+ String device_address, String device_name);
public static native int getDeviceConnectionState(int device, String device_address);
public static native int setPhoneState(int state);
public static native int setForceUse(int usage, int config);
@@ -563,11 +624,109 @@ public class AudioSystem
public static native int listAudioPatches(ArrayList<AudioPatch> patches, int[] generation);
public static native int setAudioPortConfig(AudioPortConfig config);
+ // declare this instance as having a dynamic policy callback handler
+ private static native final void native_register_dynamic_policy_callback();
+
// must be kept in sync with value in include/system/audio.h
public static final int AUDIO_HW_SYNC_INVALID = 0;
public static native int getAudioHwSyncForSession(int sessionId);
public static native int registerPolicyMixes(ArrayList<AudioMix> mixes, boolean register);
+
+ public static native int systemReady();
+
+ // Items shared with audio service
+
+ /**
+ * The delay before playing a sound. This small period exists so the user
+ * can press another key (non-volume keys, too) to have it NOT be audible.
+ * <p>
+ * PhoneWindow will implement this part.
+ */
+ public static final int PLAY_SOUND_DELAY = 300;
+
+ /**
+ * Constant to identify a focus stack entry that is used to hold the focus while the phone
+ * is ringing or during a call. Used by com.android.internal.telephony.CallManager when
+ * entering and exiting calls.
+ */
+ public final static String IN_VOICE_COMM_FOCUS_ID = "AudioFocus_For_Phone_Ring_And_Calls";
+
+ /**
+ * @see AudioManager#setVibrateSetting(int, int)
+ */
+ public static int getValueForVibrateSetting(int existingValue, int vibrateType,
+ int vibrateSetting) {
+
+ // First clear the existing setting. Each vibrate type has two bits in
+ // the value. Note '3' is '11' in binary.
+ existingValue &= ~(3 << (vibrateType * 2));
+
+ // Set into the old value
+ existingValue |= (vibrateSetting & 3) << (vibrateType * 2);
+
+ return existingValue;
+ }
+
+ public static int getDefaultStreamVolume(int streamType) {
+ return DEFAULT_STREAM_VOLUME[streamType];
+ }
+
+ public static int[] DEFAULT_STREAM_VOLUME = new int[] {
+ 4, // STREAM_VOICE_CALL
+ 7, // STREAM_SYSTEM
+ 5, // STREAM_RING
+ 11, // STREAM_MUSIC
+ 6, // STREAM_ALARM
+ 5, // STREAM_NOTIFICATION
+ 7, // STREAM_BLUETOOTH_SCO
+ 7, // STREAM_SYSTEM_ENFORCED
+ 11, // STREAM_DTMF
+ 11 // STREAM_TTS
+ };
+
+ public static String streamToString(int stream) {
+ if (stream >= 0 && stream < STREAM_NAMES.length) return STREAM_NAMES[stream];
+ if (stream == AudioManager.USE_DEFAULT_STREAM_TYPE) return "USE_DEFAULT_STREAM_TYPE";
+ return "UNKNOWN_STREAM_" + stream;
+ }
+
+ /** The platform has no specific capabilities */
+ public static final int PLATFORM_DEFAULT = 0;
+ /** The platform is voice call capable (a phone) */
+ public static final int PLATFORM_VOICE = 1;
+ /** The platform is a television or a set-top box */
+ public static final int PLATFORM_TELEVISION = 2;
+
+ /**
+ * Return the platform type that this is running on. One of:
+ * <ul>
+ * <li>{@link #PLATFORM_VOICE}</li>
+ * <li>{@link #PLATFORM_TELEVISION}</li>
+ * <li>{@link #PLATFORM_DEFAULT}</li>
+ * </ul>
+ */
+ public static int getPlatformType(Context context) {
+ if (context.getResources().getBoolean(com.android.internal.R.bool.config_voice_capable)) {
+ return PLATFORM_VOICE;
+ } else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_LEANBACK)) {
+ return PLATFORM_TELEVISION;
+ } else {
+ return PLATFORM_DEFAULT;
+ }
+ }
+
+ public static final int DEFAULT_MUTE_STREAMS_AFFECTED =
+ (1 << STREAM_MUSIC) |
+ (1 << STREAM_RING) |
+ (1 << STREAM_NOTIFICATION) |
+ (1 << STREAM_SYSTEM);
+
+ /**
+ * Event posted by AudioTrack and AudioRecord JNI (JNIDeviceCallback) when routing changes.
+ * Keep in sync with core/jni/android_media_DeviceCallback.h.
+ */
+ final static int NATIVE_EVENT_ROUTING_CHANGE = 1000;
}
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 547d87e..f76189c 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -19,12 +19,15 @@ package android.media;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
+import java.lang.Math;
import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
import java.nio.NioUtils;
-import java.util.Iterator;
-import java.util.Set;
+import java.util.Collection;
import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
import android.app.ActivityThread;
import android.app.AppOpsManager;
import android.content.Context;
@@ -35,6 +38,7 @@ import android.os.Message;
import android.os.Process;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.util.ArrayMap;
import android.util.Log;
import com.android.internal.app.IAppOpsService;
@@ -92,8 +96,9 @@ public class AudioTrack
/** Minimum value for sample rate */
private static final int SAMPLE_RATE_HZ_MIN = 4000;
/** Maximum value for sample rate */
- private static final int SAMPLE_RATE_HZ_MAX = 96000;
+ private static final int SAMPLE_RATE_HZ_MAX = 192000;
+ // FCC_8
/** Maximum value for AudioTrack channel count */
private static final int CHANNEL_COUNT_MAX = 8;
@@ -116,6 +121,14 @@ public class AudioTrack
*/
public static final int MODE_STREAM = 1;
+ /** @hide */
+ @IntDef({
+ MODE_STATIC,
+ MODE_STREAM
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface TransferMode {}
+
/**
* State of an AudioTrack that was not successfully initialized upon creation.
*/
@@ -179,13 +192,20 @@ public class AudioTrack
/**
* The write mode indicating the write operation will block until all data has been written,
- * to be used in {@link #write(ByteBuffer, int, int)}
+ * to be used as the actual value of the writeMode parameter in
+ * {@link #write(byte[], int, int, int)}, {@link #write(short[], int, int, int)},
+ * {@link #write(float[], int, int, int)}, {@link #write(ByteBuffer, int, int)}, and
+ * {@link #write(ByteBuffer, int, int, long)}.
*/
public final static int WRITE_BLOCKING = 0;
+
/**
* The write mode indicating the write operation will return immediately after
- * queuing as much audio data for playback as possible without blocking, to be used in
- * {@link #write(ByteBuffer, int, int)}.
+ * queuing as much audio data for playback as possible without blocking,
+ * to be used as the actual value of the writeMode parameter in
+     * {@link #write(byte[], int, int, int)}, {@link #write(short[], int, int, int)},
+ * {@link #write(float[], int, int, int)}, {@link #write(ByteBuffer, int, int)}, and
+ * {@link #write(ByteBuffer, int, int, long)}.
*/
public final static int WRITE_NON_BLOCKING = 1;
@@ -194,25 +214,29 @@ public class AudioTrack
//--------------------
/**
* Indicates the state of the AudioTrack instance.
+ * One of STATE_UNINITIALIZED, STATE_INITIALIZED, or STATE_NO_STATIC_DATA.
*/
private int mState = STATE_UNINITIALIZED;
/**
* Indicates the play state of the AudioTrack instance.
+ * One of PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, or PLAYSTATE_PLAYING.
*/
private int mPlayState = PLAYSTATE_STOPPED;
/**
- * Lock to make sure mPlayState updates are reflecting the actual state of the object.
+ * Lock to ensure mPlayState updates reflect the actual state of the object.
*/
private final Object mPlayStateLock = new Object();
/**
* Sizes of the native audio buffer.
+ * These values are set during construction and can be stale.
+ * To obtain the current native audio buffer frame count use {@link #getBufferSizeInFrames()}.
*/
private int mNativeBufferSizeInBytes = 0;
private int mNativeBufferSizeInFrames = 0;
/**
* Handler for events coming from the native code.
*/
- private NativeEventHandlerDelegate mEventHandlerDelegate;
+ private NativePositionEventHandlerDelegate mEventHandlerDelegate;
/**
* Looper associated with the thread that creates the AudioTrack instance.
*/
@@ -220,15 +244,15 @@ public class AudioTrack
/**
* The audio data source sampling rate in Hz.
*/
- private int mSampleRate; // initialized by all constructors
+ private int mSampleRate; // initialized by all constructors via audioParamCheck()
/**
- * The number of audio output channels (1 is mono, 2 is stereo).
+ * The number of audio output channels (1 is mono, 2 is stereo, etc.).
*/
private int mChannelCount = 1;
/**
- * The audio channel mask.
+ * The audio channel mask used for calling native AudioTrack
*/
- private int mChannels = AudioFormat.CHANNEL_OUT_MONO;
+ private int mChannelMask = AudioFormat.CHANNEL_OUT_MONO;
/**
* The type of the audio stream to play. See
@@ -241,20 +265,26 @@ public class AudioTrack
private final AudioAttributes mAttributes;
/**
- * The way audio is consumed by the audio sink, streaming or static.
+ * The way audio is consumed by the audio sink, one of MODE_STATIC or MODE_STREAM.
*/
private int mDataLoadMode = MODE_STREAM;
/**
- * The current audio channel configuration.
+ * The current channel position mask, as specified on AudioTrack creation.
+ * Can be set simultaneously with channel index mask {@link #mChannelIndexMask}.
+ * May be set to {@link AudioFormat#CHANNEL_INVALID} if a channel index mask is specified.
*/
private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
/**
+ * The channel index mask if specified, otherwise 0.
+ */
+ private int mChannelIndexMask = 0;
+ /**
* The encoding of the audio samples.
* @see AudioFormat#ENCODING_PCM_8BIT
* @see AudioFormat#ENCODING_PCM_16BIT
* @see AudioFormat#ENCODING_PCM_FLOAT
*/
- private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ private int mAudioFormat; // initialized by all constructors via audioParamCheck()
/**
* Audio session ID
*/
@@ -263,6 +293,14 @@ public class AudioTrack
* Reference to the app-ops service.
*/
private final IAppOpsService mAppOps;
+ /**
+ * HW_AV_SYNC track AV Sync Header
+ */
+ private ByteBuffer mAvSyncHeader = null;
+ /**
+ * HW_AV_SYNC track audio data bytes remaining to write after current AV sync header
+ */
+ private int mAvSyncBytesRemaining = 0;
//--------------------------------
// Used exclusively by native code
@@ -298,15 +336,20 @@ public class AudioTrack
* {@link AudioFormat#ENCODING_PCM_8BIT},
* and {@link AudioFormat#ENCODING_PCM_FLOAT}.
* @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
- * read from for playback.
- * If track's creation mode is {@link #MODE_STREAM}, you can write data into
- * this buffer in chunks less than or equal to this size, and it is typical to use
- * chunks of 1/2 of the total size to permit double-buffering.
- * If the track's creation mode is {@link #MODE_STATIC},
+ * read from for playback. This should be a multiple of the frame size in bytes.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
* this is the maximum length sample, or audio clip, that can be played by this instance.
- * See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
- * for the successful creation of an AudioTrack instance in streaming mode. Using values
- * smaller than getMinBufferSize() will result in an initialization failure.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * this should be the desired buffer size
+ * for the <code>AudioTrack</code> to satisfy the application's
+ * natural latency requirements.
+ * If <code>bufferSizeInBytes</code> is less than the
+ * minimum buffer size for the output sink, it is automatically increased to the minimum
+ * buffer size.
+ * The method {@link #getBufferSizeInFrames()} returns the
+ * actual size in frames of the native buffer created, which
+ * determines the frequency to write
+ * to the streaming <code>AudioTrack</code> to avoid underrun.
* @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
* @throws java.lang.IllegalArgumentException
*/
@@ -416,16 +459,24 @@ public class AudioTrack
rate = 44100;
}
}
- int channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
- if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0)
- {
+ int channelIndexMask = 0;
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
+ channelIndexMask = format.getChannelIndexMask();
+ }
+ int channelMask = 0;
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
channelMask = format.getChannelMask();
+ } else if (channelIndexMask == 0) { // if no masks at all, use stereo
+ channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT
+ | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
}
int encoding = AudioFormat.ENCODING_DEFAULT;
if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0) {
encoding = format.getEncoding();
}
- audioParamCheck(rate, channelMask, encoding, mode);
+ audioParamCheck(rate, channelMask, channelIndexMask, encoding, mode);
mStreamType = AudioSystem.STREAM_DEFAULT;
audioBuffSizeCheck(bufferSizeInBytes);
@@ -434,7 +485,7 @@ public class AudioTrack
IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
mAppOps = IAppOpsService.Stub.asInterface(b);
- mAttributes = (new AudioAttributes.Builder(attributes).build());
+ mAttributes = new AudioAttributes.Builder(attributes).build();
if (sessionId < 0) {
throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
@@ -444,7 +495,7 @@ public class AudioTrack
session[0] = sessionId;
// native initialization
int initResult = native_setup(new WeakReference<AudioTrack>(this), mAttributes,
- mSampleRate, mChannels, mAudioFormat,
+ mSampleRate, mChannelMask, mChannelIndexMask, mAudioFormat,
mNativeBufferSizeInBytes, mDataLoadMode, session);
if (initResult != SUCCESS) {
loge("Error code "+initResult+" when initializing AudioTrack.");
@@ -460,7 +511,190 @@ public class AudioTrack
}
}
- // mask of all the channels supported by this implementation
+ /**
+ * Builder class for {@link AudioTrack} objects.
+ * Use this class to configure and create an <code>AudioTrack</code> instance. By setting audio
+ * attributes and audio format parameters, you indicate which of those vary from the default
+ * behavior on the device.
+ * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
+ * parameters, to be used by a new <code>AudioTrack</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioTrack player = new AudioTrack.Builder()
+ * .setAudioAttributes(new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_ALARM)
+     *                  .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
+ * .build())
+ * .setAudioFormat(new AudioFormat.Builder()
+ * .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+     *         .setSampleRate(44100)
+ * .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
+ * .build())
+ * .setBufferSize(minBuffSize)
+ * .build();
+ * </pre>
+ * <p>
+ * If the audio attributes are not set with {@link #setAudioAttributes(AudioAttributes)},
+ * attributes comprising {@link AudioAttributes#USAGE_MEDIA} will be used.
+ * <br>If the audio format is not specified or is incomplete, its sample rate will be the
+ * default output sample rate of the device (see
+ * {@link AudioManager#PROPERTY_OUTPUT_SAMPLE_RATE}), its channel configuration will be
+ * {@link AudioFormat#CHANNEL_OUT_STEREO} and the encoding will be
+ * {@link AudioFormat#ENCODING_PCM_16BIT}.
+ * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
+ * and the mode is {@link AudioTrack#MODE_STREAM}, the minimum buffer size is used.
+ * <br>If the transfer mode is not specified with {@link #setTransferMode(int)},
+ * <code>MODE_STREAM</code> will be used.
+ * <br>If the session ID is not specified with {@link #setSessionId(int)}, a new one will
+ * be generated.
+ */
+ public static class Builder {
+ private AudioAttributes mAttributes;
+ private AudioFormat mFormat;
+ private int mBufferSizeInBytes;
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+ private int mMode = MODE_STREAM;
+
+ /**
+ * Constructs a new Builder with the default values as described above.
+ */
+ public Builder() {
+ }
+
+ /**
+ * Sets the {@link AudioAttributes}.
+ * @param attributes a non-null {@link AudioAttributes} instance that describes the audio
+ * data to be played.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setAudioAttributes(@NonNull AudioAttributes attributes)
+ throws IllegalArgumentException {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+ }
+ // keep reference, we only copy the data when building
+ mAttributes = attributes;
+ return this;
+ }
+
+ /**
+ * Sets the format of the audio data to be played by the {@link AudioTrack}.
+ * See {@link AudioFormat.Builder} for configuring the audio format parameters such
+ * as encoding, channel mask and sample rate.
+ * @param format a non-null {@link AudioFormat} instance.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setAudioFormat(@NonNull AudioFormat format)
+ throws IllegalArgumentException {
+ if (format == null) {
+ throw new IllegalArgumentException("Illegal null AudioFormat argument");
+ }
+ // keep reference, we only copy the data when building
+ mFormat = format;
+ return this;
+ }
+
+ /**
+ * Sets the total size (in bytes) of the buffer where audio data is read from for playback.
+ * If using the {@link AudioTrack} in streaming mode
+         * (see {@link AudioTrack#MODE_STREAM}), you can write data into this buffer in smaller
+ * chunks than this size. See {@link #getMinBufferSize(int, int, int)} to determine
+ * the minimum required buffer size for the successful creation of an AudioTrack instance
+ * in streaming mode. Using values smaller than <code>getMinBufferSize()</code> will result
+ * in an exception when trying to build the <code>AudioTrack</code>.
+ * <br>If using the <code>AudioTrack</code> in static mode (see
+ * {@link AudioTrack#MODE_STATIC}), this is the maximum size of the sound that will be
+ * played by this instance.
+ * @param bufferSizeInBytes
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setBufferSizeInBytes(int bufferSizeInBytes)
+ throws IllegalArgumentException {
+ if (bufferSizeInBytes <= 0) {
+ throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
+ }
+ mBufferSizeInBytes = bufferSizeInBytes;
+ return this;
+ }
+
+ /**
+ * Sets the mode under which buffers of audio data are transferred from the
+ * {@link AudioTrack} to the framework.
+ * @param mode one of {@link AudioTrack#MODE_STREAM}, {@link AudioTrack#MODE_STATIC}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setTransferMode(@TransferMode int mode)
+ throws IllegalArgumentException {
+ switch(mode) {
+ case MODE_STREAM:
+ case MODE_STATIC:
+ mMode = mode;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid transfer mode " + mode);
+ }
+ return this;
+ }
+
+ /**
+ * Sets the session ID the {@link AudioTrack} will be attached to.
+ * @param sessionId a strictly positive ID number retrieved from another
+ * <code>AudioTrack</code> via {@link AudioTrack#getAudioSessionId()} or allocated by
+ * {@link AudioManager} via {@link AudioManager#generateAudioSessionId()}, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setSessionId(int sessionId)
+ throws IllegalArgumentException {
+ if ((sessionId != AudioManager.AUDIO_SESSION_ID_GENERATE) && (sessionId < 1)) {
+ throw new IllegalArgumentException("Invalid audio session ID " + sessionId);
+ }
+ mSessionId = sessionId;
+ return this;
+ }
+
+ /**
+ * Builds an {@link AudioTrack} instance initialized with all the parameters set
+ * on this <code>Builder</code>.
+ * @return a new {@link AudioTrack} instance.
+ * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
+ * were incompatible, or if they are not supported by the device.
+ */
+ public @NonNull AudioTrack build() throws UnsupportedOperationException {
+ if (mAttributes == null) {
+ mAttributes = new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_MEDIA)
+ .build();
+ }
+ if (mFormat == null) {
+ mFormat = new AudioFormat.Builder()
+ .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
+ .setSampleRate(AudioSystem.getPrimaryOutputSamplingRate())
+ .setEncoding(AudioFormat.ENCODING_DEFAULT)
+ .build();
+ }
+ try {
+ // If the buffer size is not specified in streaming mode,
+ // use a single frame for the buffer size and let the
+ // native code figure out the minimum buffer size.
+ if (mMode == MODE_STREAM && mBufferSizeInBytes == 0) {
+ mBufferSizeInBytes = mFormat.getChannelCount()
+ * mFormat.getBytesPerSample(mFormat.getEncoding());
+ }
+ return new AudioTrack(mAttributes, mFormat, mBufferSizeInBytes, mMode, mSessionId);
+ } catch (IllegalArgumentException e) {
+ throw new UnsupportedOperationException(e.getMessage());
+ }
+ }
+ }
+
+ // mask of all the positional channels supported, however the allowed combinations
+ // are further restricted by the matching left/right rule and CHANNEL_COUNT_MAX
private static final int SUPPORTED_OUT_CHANNELS =
AudioFormat.CHANNEL_OUT_FRONT_LEFT |
AudioFormat.CHANNEL_OUT_FRONT_RIGHT |
@@ -476,12 +710,12 @@ public class AudioTrack
// This is where constructor IllegalArgumentException-s are thrown
// postconditions:
// mChannelCount is valid
- // mChannels is valid
+ // mChannelMask is valid
// mAudioFormat is valid
// mSampleRate is valid
// mDataLoadMode is valid
- private void audioParamCheck(int sampleRateInHz,
- int channelConfig, int audioFormat, int mode) {
+ private void audioParamCheck(int sampleRateInHz, int channelConfig, int channelIndexMask,
+ int audioFormat, int mode) {
//--------------
// sample rate, note these values are subject to change
if (sampleRateInHz < SAMPLE_RATE_HZ_MIN || sampleRateInHz > SAMPLE_RATE_HZ_MAX) {
@@ -499,20 +733,40 @@ public class AudioTrack
case AudioFormat.CHANNEL_OUT_MONO:
case AudioFormat.CHANNEL_CONFIGURATION_MONO:
mChannelCount = 1;
- mChannels = AudioFormat.CHANNEL_OUT_MONO;
+ mChannelMask = AudioFormat.CHANNEL_OUT_MONO;
break;
case AudioFormat.CHANNEL_OUT_STEREO:
case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
mChannelCount = 2;
- mChannels = AudioFormat.CHANNEL_OUT_STEREO;
+ mChannelMask = AudioFormat.CHANNEL_OUT_STEREO;
break;
default:
+ if (channelConfig == AudioFormat.CHANNEL_INVALID && channelIndexMask != 0) {
+ mChannelCount = 0;
+ break; // channel index configuration only
+ }
if (!isMultichannelConfigSupported(channelConfig)) {
// input channel configuration features unsupported channels
throw new IllegalArgumentException("Unsupported channel configuration.");
}
- mChannels = channelConfig;
- mChannelCount = Integer.bitCount(channelConfig);
+ mChannelMask = channelConfig;
+ mChannelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
+ }
+ // check the channel index configuration (if present)
+ mChannelIndexMask = channelIndexMask;
+ if (mChannelIndexMask != 0) {
+ // restrictive: indexMask could allow up to AUDIO_CHANNEL_BITS_LOG2
+ final int indexMask = (1 << CHANNEL_COUNT_MAX) - 1;
+ if ((channelIndexMask & ~indexMask) != 0) {
+ throw new IllegalArgumentException("Unsupported channel index configuration "
+ + channelIndexMask);
+ }
+ int channelIndexCount = Integer.bitCount(channelIndexMask);
+ if (mChannelCount == 0) {
+ mChannelCount = channelIndexCount;
+ } else if (mChannelCount != channelIndexCount) {
+ throw new IllegalArgumentException("Channel count must match");
+ }
}
//--------------
@@ -521,7 +775,7 @@ public class AudioTrack
audioFormat = AudioFormat.ENCODING_PCM_16BIT;
}
- if (!AudioFormat.isValidEncoding(audioFormat)) {
+ if (!AudioFormat.isPublicEncoding(audioFormat)) {
throw new IllegalArgumentException("Unsupported audio encoding.");
}
mAudioFormat = audioFormat;
@@ -546,7 +800,7 @@ public class AudioTrack
loge("Channel configuration features unsupported channels");
return false;
}
- final int channelCount = Integer.bitCount(channelConfig);
+ final int channelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
if (channelCount > CHANNEL_COUNT_MAX) {
loge("Channel configuration contains too many channels " +
channelCount + ">" + CHANNEL_COUNT_MAX);
@@ -591,8 +845,7 @@ public class AudioTrack
// To update when supporting compressed formats
int frameSizeInBytes;
if (AudioFormat.isEncodingLinearPcm(mAudioFormat)) {
- frameSizeInBytes = mChannelCount
- * (AudioFormat.getBytesPerSample(mAudioFormat));
+ frameSizeInBytes = mChannelCount * AudioFormat.getBytesPerSample(mAudioFormat);
} else {
frameSizeInBytes = 1;
}
@@ -658,15 +911,25 @@ public class AudioTrack
}
/**
- * Returns the current playback rate in Hz.
+ * Returns the current playback sample rate in Hz.
*/
public int getPlaybackRate() {
return native_get_playback_rate();
}
/**
- * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
- * and {@link AudioFormat#ENCODING_PCM_8BIT}.
+ * Returns the current playback parameters.
+ * See {@link #setPlaybackParams(PlaybackParams)} to set playback parameters.
+ * @return current {@link PlaybackParams}.
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public @NonNull PlaybackParams getPlaybackParams() {
+ return native_get_playback_params();
+ }
+
+ /**
+ * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
+ * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
*/
public int getAudioFormat() {
return mAudioFormat;
@@ -684,15 +947,37 @@ public class AudioTrack
}
/**
- * Returns the configured channel configuration.
- * See {@link AudioFormat#CHANNEL_OUT_MONO}
- * and {@link AudioFormat#CHANNEL_OUT_STEREO}.
+ * Returns the configured channel position mask.
+ * <p> For example, refer to {@link AudioFormat#CHANNEL_OUT_MONO},
+ * {@link AudioFormat#CHANNEL_OUT_STEREO}, {@link AudioFormat#CHANNEL_OUT_5POINT1}.
+ * This method may return {@link AudioFormat#CHANNEL_INVALID} if
+ * a channel index mask was used. Consider
+ * {@link #getFormat()} instead, to obtain an {@link AudioFormat},
+ * which contains both the channel position mask and the channel index mask.
*/
public int getChannelConfiguration() {
return mChannelConfiguration;
}
/**
+ * Returns the configured <code>AudioTrack</code> format.
+ * @return an {@link AudioFormat} containing the
+ * <code>AudioTrack</code> parameters at the time of configuration.
+ */
+ public @NonNull AudioFormat getFormat() {
+ AudioFormat.Builder builder = new AudioFormat.Builder()
+ .setSampleRate(mSampleRate)
+ .setEncoding(mAudioFormat);
+ if (mChannelConfiguration != AudioFormat.CHANNEL_INVALID) {
+ builder.setChannelMask(mChannelConfiguration);
+ }
+ if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
+ builder.setChannelIndexMask(mChannelIndexMask);
+ }
+ return builder.build();
+ }
+
+ /**
* Returns the configured number of channels.
*/
public int getChannelCount() {
@@ -703,9 +988,9 @@ public class AudioTrack
* Returns the state of the AudioTrack instance. This is useful after the
* AudioTrack instance has been created to check if it was initialized
* properly. This ensures that the appropriate resources have been acquired.
+ * @see #STATE_UNINITIALIZED
* @see #STATE_INITIALIZED
* @see #STATE_NO_STATIC_DATA
- * @see #STATE_UNINITIALIZED
*/
public int getState() {
return mState;
@@ -724,16 +1009,33 @@ public class AudioTrack
}
/**
- * Returns the "native frame count", derived from the bufferSizeInBytes specified at
- * creation time and converted to frame units.
- * If track's creation mode is {@link #MODE_STATIC},
- * it is equal to the specified bufferSizeInBytes converted to frame units.
- * If track's creation mode is {@link #MODE_STREAM},
- * it is typically greater than or equal to the specified bufferSizeInBytes converted to frame
- * units; it may be rounded up to a larger value if needed by the target device implementation.
- * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
- * See {@link AudioManager#getProperty(String)} for key
+ * Returns the frame count of the native <code>AudioTrack</code> buffer.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
+ * it is equal to the specified bufferSizeInBytes on construction, converted to frame units.
+ * A static track's native frame count will not change.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * it is greater than or equal to the specified bufferSizeInBytes converted to frame units.
+ * For streaming tracks, this value may be rounded up to a larger value if needed by
+ * the target output sink, and
+ * if the track is subsequently routed to a different output sink, the native
+ * frame count may enlarge to accommodate.
+ * <p> If the <code>AudioTrack</code> encoding indicates compressed data,
+ * e.g. {@link AudioFormat#ENCODING_AC3}, then the frame count returned is
+ * the size of the native <code>AudioTrack</code> buffer in bytes.
+ * <p> See also {@link AudioManager#getProperty(String)} for key
* {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
+ * @return current size in frames of the <code>AudioTrack</code> buffer.
+ * @throws IllegalStateException
+ */
+ public int getBufferSizeInFrames() {
+ return native_get_native_frame_count();
+ }
+
+ /**
+ * Returns the frame count of the native <code>AudioTrack</code> buffer.
+ * @return current size in frames of the <code>AudioTrack</code> buffer.
+ * @throws IllegalStateException
+ * @deprecated Use the identical public method {@link #getBufferSizeInFrames()} instead.
*/
@Deprecated
protected int getNativeFrameCount() {
@@ -763,7 +1065,10 @@ public class AudioTrack
* unsigned 32-bits. That is, the next position after 0x7FFFFFFF is (int) 0x80000000.
* This is a continuously advancing counter. It will wrap (overflow) periodically,
* for example approximately once every 27:03:11 hours:minutes:seconds at 44.1 kHz.
- * It is reset to zero by flush(), reload(), and stop().
+ * It is reset to zero by {@link #flush()}, {@link #reloadStaticData()}, and {@link #stop()}.
+ * If the track's creation mode is {@link #MODE_STATIC}, the return value indicates
+ * the total number of frames played since reset,
+ * <i>not</i> the current offset within the buffer.
*/
public int getPlaybackHeadPosition() {
return native_get_position();
@@ -820,16 +1125,15 @@ public class AudioTrack
channelCount = 2;
break;
default:
- if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
- // input channel configuration features unsupported channels
+ if (!isMultichannelConfigSupported(channelConfig)) {
loge("getMinBufferSize(): Invalid channel configuration.");
return ERROR_BAD_VALUE;
} else {
- channelCount = Integer.bitCount(channelConfig);
+ channelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
}
}
- if (!AudioFormat.isValidEncoding(audioFormat)) {
+ if (!AudioFormat.isPublicEncoding(audioFormat)) {
loge("getMinBufferSize(): Invalid audio format.");
return ERROR_BAD_VALUE;
}
@@ -933,7 +1237,7 @@ public class AudioTrack
public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
Handler handler) {
if (listener != null) {
- mEventHandlerDelegate = new NativeEventHandlerDelegate(this, listener, handler);
+ mEventHandlerDelegate = new NativePositionEventHandlerDelegate(this, listener, handler);
} else {
mEventHandlerDelegate = null;
}
@@ -1014,6 +1318,10 @@ public class AudioTrack
* playback to last twice as long, but will also result in a pitch shift down by one octave.
* The valid sample rate range is from 1 Hz to twice the value returned by
* {@link #getNativeOutputSampleRate(int)}.
+ * Use {@link #setPlaybackParams(PlaybackParams)} for speed control.
+ * <p> This method may also be used to repurpose an existing <code>AudioTrack</code>
+ * for playback of content of differing sample rate,
+ * but with identical encoding and channel mask.
* @param sampleRateInHz the sample rate expressed in Hz
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
@@ -1030,6 +1338,28 @@ public class AudioTrack
/**
+ * Sets the playback parameters.
+ * This method returns failure if it cannot apply the playback parameters.
+ * One possible cause is that the parameters for speed or pitch are out of range.
+ * Another possible cause is that the <code>AudioTrack</code> is streaming
+ * (see {@link #MODE_STREAM}) and the
+ * buffer size is too small. For speeds greater than 1.0f, the <code>AudioTrack</code> buffer
+ * on configuration must be larger than the speed multiplied by the minimum size
+ * {@link #getMinBufferSize(int, int, int)} to allow proper playback.
+ * @param params see {@link PlaybackParams}. In particular,
+ * speed, pitch, and audio mode should be set.
+ * @throws IllegalArgumentException if the parameters are invalid or not accepted.
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public void setPlaybackParams(@NonNull PlaybackParams params) {
+ if (params == null) {
+ throw new IllegalArgumentException("params is null");
+ }
+ native_set_playback_params(params);
+ }
+
+
+ /**
* Sets the position of the notification marker. At most one marker can be active.
* @param markerInFrames marker position in wrapping frame units similar to
* {@link #getPlaybackHeadPosition}, or zero to disable the marker.
@@ -1048,7 +1378,8 @@ public class AudioTrack
/**
* Sets the period for the periodic notification event.
- * @param periodInFrames update period expressed in frames
+ * @param periodInFrames update period expressed in frames.
+ * Zero period means no position updates. A negative period is not allowed.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
*/
public int setPositionNotificationPeriod(int periodInFrames) {
@@ -1060,12 +1391,20 @@ public class AudioTrack
/**
- * Sets the playback head position.
+ * Sets the playback head position within the static buffer.
* The track must be stopped or paused for the position to be changed,
* and must use the {@link #MODE_STATIC} mode.
- * @param positionInFrames playback head position expressed in frames
+ * @param positionInFrames playback head position within buffer, expressed in frames.
* Zero corresponds to start of buffer.
* The position must not be greater than the buffer size in frames, or negative.
+ * Though this method and {@link #getPlaybackHeadPosition()} have similar names,
+ * the position values have different meanings.
+ * <br>
+ * If looping is currently enabled and the new position is greater than or equal to the
+ * loop end marker, the behavior varies by API level:
+ * as of {@link android.os.Build.VERSION_CODES#MNC},
+ * the looping is first disabled and then the position is set.
+ * For earlier API levels, the behavior is unspecified.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -1085,17 +1424,29 @@ public class AudioTrack
* Similarly to setPlaybackHeadPosition,
* the track must be stopped or paused for the loop points to be changed,
* and must use the {@link #MODE_STATIC} mode.
- * @param startInFrames loop start marker expressed in frames
+ * @param startInFrames loop start marker expressed in frames.
* Zero corresponds to start of buffer.
* The start marker must not be greater than or equal to the buffer size in frames, or negative.
- * @param endInFrames loop end marker expressed in frames
+ * @param endInFrames loop end marker expressed in frames.
* The total buffer size in frames corresponds to end of buffer.
* The end marker must not be greater than the buffer size in frames.
* For looping, the end marker must not be less than or equal to the start marker,
* but to disable looping
* it is permitted for start marker, end marker, and loop count to all be 0.
- * @param loopCount the number of times the loop is looped.
+ * If any input parameters are out of range, this method returns {@link #ERROR_BAD_VALUE}.
+ * If the loop period (endInFrames - startInFrames) is too small for the implementation to
+ * support,
+ * {@link #ERROR_BAD_VALUE} is returned.
+ * The loop range is the interval [startInFrames, endInFrames).
+ * <br>
+ * As of {@link android.os.Build.VERSION_CODES#MNC}, the position is left unchanged,
+ * unless it is greater than or equal to the loop end marker, in which case
+ * it is forced to the loop start marker.
+ * For earlier API levels, the effect on position is unspecified.
+ * @param loopCount the number of times the loop is looped; must be greater than or equal to -1.
* A value of -1 means infinite looping, and 0 disables looping.
+ * A value of positive N means to "loop" (go back) N times. For example,
+ * a value of one means to play the region two times in total.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -1131,9 +1482,27 @@ public class AudioTrack
//--------------------
/**
* Starts playing an AudioTrack.
- * If track's creation mode is {@link #MODE_STATIC}, you must have called write() prior.
+ * <p>
+ * If track's creation mode is {@link #MODE_STATIC}, you must have called one of
+ * the write methods ({@link #write(byte[], int, int)}, {@link #write(byte[], int, int, int)},
+ * {@link #write(short[], int, int)}, {@link #write(short[], int, int, int)},
+ * {@link #write(float[], int, int, int)}, or {@link #write(ByteBuffer, int, int)}) prior to
+ * play().
+ * <p>
+ * If the mode is {@link #MODE_STREAM}, you can optionally prime the data path prior to
+ * calling play(), by writing up to <code>bufferSizeInBytes</code> (from constructor).
+ * If you don't call write() first, or if you call write() but with an insufficient amount of
+ * data, then the track will be in underrun state at play(). In this case,
+ * playback will not actually start playing until the data path is filled to a
+ * device-specific minimum level. This requirement for the path to be filled
+ * to a minimum level is also true when resuming audio playback after calling stop().
+ * Similarly the buffer will need to be filled up again after
+ * the track underruns due to failure to call write() in a timely manner with sufficient data.
+ * For portability, an application should prime the data path to the maximum allowed
+ * by writing data until the write() method returns a short transfer count.
+ * This allows play() to start immediately, and reduces the chance of underrun.
*
- * @throws IllegalStateException
+ * @throws IllegalStateException if the track isn't properly initialized
*/
public void play()
throws IllegalStateException {
@@ -1150,6 +1519,9 @@ public class AudioTrack
}
private boolean isRestricted() {
+ if ((mAttributes.getAllFlags() & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0) {
+ return false;
+ }
try {
final int usage = AudioAttributes.usageForLegacyStreamType(mStreamType);
final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO, usage,
@@ -1178,6 +1550,8 @@ public class AudioTrack
synchronized(mPlayStateLock) {
native_stop();
mPlayState = PLAYSTATE_STOPPED;
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
}
}
@@ -1209,13 +1583,21 @@ public class AudioTrack
/**
* Flushes the audio data currently queued for playback. Any data that has
- * not been played back will be discarded. No-op if not stopped or paused,
+ * been written but not yet presented will be discarded. No-op if not stopped or paused,
* or if the track's creation mode is not {@link #MODE_STREAM}.
+ * <BR> Note that although data written but not yet presented is discarded, there is no
+ * guarantee that all of the buffer space formerly used by that data
+ * is available for a subsequent write.
+ * For example, a call to {@link #write(byte[], int, int)} with <code>sizeInBytes</code>
+ * less than or equal to the total buffer size
+ * may return a short actual transfer count.
*/
public void flush() {
if (mState == STATE_INITIALIZED) {
// flush the data in native layer
native_flush();
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
}
}
@@ -1223,29 +1605,85 @@ public class AudioTrack
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
- * In streaming mode, will block until all data has been written to the audio sink.
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the write will normally block until all the data has been enqueued for
+ * playback, and will return a full transfer count. However, if the track is stopped or paused
+ * on entry, or another thread interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
* In static buffer mode, copies the data to the buffer starting at offset 0.
- * Note that the actual playback of this data might occur after this function
- * returns. This function is thread safe with respect to {@link #stop} calls,
- * in which case all of the specified data might not be written to the audio sink.
+ * Note that the actual playback of this data might occur after this function returns.
*
* @param audioData the array that holds the data to play.
* @param offsetInBytes the offset expressed in bytes in audioData where the data to play
* starts.
* @param sizeInBytes the number of bytes to read in audioData after the offset.
- * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
- * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * @return zero or the positive number of bytes that were written, or
+ * {@link #ERROR_INVALID_OPERATION}
+ * if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes, or
* {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
* needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
+ *
+ * This is equivalent to {@link #write(byte[], int, int, int)} with <code>writeMode</code>
+ * set to {@link #WRITE_BLOCKING}.
*/
+ public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
+ return write(audioData, offsetInBytes, sizeInBytes, WRITE_BLOCKING);
+ }
- public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) {
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0,
+ * and the write mode is ignored.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
+ * starts.
+ * @param sizeInBytes the number of bytes to read in audioData after the offset.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return zero or the positive number of bytes that were written, or
+ * {@link #ERROR_INVALID_OPERATION}
+ * if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
+ */
+ public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
+ @WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
|| (offsetInBytes + sizeInBytes < 0) // detect integer overflow
|| (offsetInBytes + sizeInBytes > audioData.length)) {
@@ -1253,7 +1691,7 @@ public class AudioTrack
}
int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat,
- true /*isBlocking*/);
+ writeMode == WRITE_BLOCKING);
if ((mDataLoadMode == MODE_STATIC)
&& (mState == STATE_NO_STATIC_DATA)
@@ -1265,38 +1703,95 @@ public class AudioTrack
return ret;
}
-
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
- * In streaming mode, will block until all data has been written to the audio sink.
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the write will normally block until all the data has been enqueued for
+ * playback, and will return a full transfer count. However, if the track is stopped or paused
+ * on entry, or another thread interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
* In static buffer mode, copies the data to the buffer starting at offset 0.
- * Note that the actual playback of this data might occur after this function
- * returns. This function is thread safe with respect to {@link #stop} calls,
- * in which case all of the specified data might not be written to the audio sink.
+ * Note that the actual playback of this data might occur after this function returns.
*
* @param audioData the array that holds the data to play.
* @param offsetInShorts the offset expressed in shorts in audioData where the data to play
* starts.
* @param sizeInShorts the number of shorts to read in audioData after the offset.
- * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
- * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
- * the parameters don't resolve to valid data and indexes.
+ * @return zero or the positive number of shorts that were written, or
+ * {@link #ERROR_INVALID_OPERATION}
+ * if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
+ *
+ * This is equivalent to {@link #write(short[], int, int, int)} with <code>writeMode</code>
+ * set to {@link #WRITE_BLOCKING}.
*/
+ public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
+ return write(audioData, offsetInShorts, sizeInShorts, WRITE_BLOCKING);
+ }
- public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
+ * starts.
+ * @param sizeInShorts the number of shorts to read in audioData after the offset.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return zero or the positive number of shorts that were written, or
+ * {@link #ERROR_INVALID_OPERATION}
+ * if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
+ */
+ public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
+ @WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
|| (offsetInShorts + sizeInShorts < 0) // detect integer overflow
|| (offsetInShorts + sizeInShorts > audioData.length)) {
return ERROR_BAD_VALUE;
}
- int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
+ int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
if ((mDataLoadMode == MODE_STATIC)
&& (mState == STATE_NO_STATIC_DATA)
@@ -1308,18 +1803,23 @@ public class AudioTrack
return ret;
}
-
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
- * In static buffer mode, copies the data to the buffer starting at offset 0,
- * and the write mode is ignored.
- * In streaming mode, the blocking behavior will depend on the write mode.
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
* <p>
- * Note that the actual playback of this data might occur after this function
- * returns. This function is thread safe with respect to {@link #stop} calls,
- * in which case all of the specified data might not be written to the audio sink.
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
* <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0,
+ * and the write mode is ignored.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
* @param audioData the array that holds the data to play.
* The implementation does not clip for sample values within the nominal range
* [-1.0f, 1.0f], provided that all gains in the audio pipeline are
@@ -1335,15 +1835,20 @@ public class AudioTrack
* @param sizeInFloats the number of floats to read in audioData after the offset.
* @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
* effect in static mode.
- * <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
* to the audio sink.
- * <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
* queuing as much audio data for playback as possible without blocking.
- * @return the number of floats that were written, or {@link #ERROR_INVALID_OPERATION}
- * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
- * the parameters don't resolve to valid data and indexes.
+ * @return zero or the positive number of floats that were written, or
+ * {@link #ERROR_INVALID_OPERATION}
+ * if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
*/
- public int write(float[] audioData, int offsetInFloats, int sizeInFloats,
+ public int write(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
@WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED) {
@@ -1385,9 +1890,19 @@ public class AudioTrack
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
- * In static buffer mode, copies the data to the buffer starting at its 0 offset, and the write
- * mode is ignored.
- * In streaming mode, the blocking behavior will depend on the write mode.
+ * The audioData in ByteBuffer should match the format specified in the AudioTrack constructor.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0,
+ * and the write mode is ignored.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
* @param audioData the buffer that holds the data to play, starting at the position reported
* by <code>audioData.position()</code>.
* <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
@@ -1401,10 +1916,14 @@ public class AudioTrack
* to the audio sink.
* <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
* queuing as much audio data for playback as possible without blocking.
- * @return 0 or a positive number of bytes that were written, or
- * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}
- */
- public int write(ByteBuffer audioData, int sizeInBytes,
+ * @return zero or the positive number of bytes that were written, or
+ * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
+ */
+ public int write(@NonNull ByteBuffer audioData, int sizeInBytes,
@WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED) {
@@ -1449,9 +1968,99 @@ public class AudioTrack
}
/**
- * Notifies the native resource to reuse the audio data already loaded in the native
- * layer, that is to rewind to start of buffer.
- * The track's creation mode must be {@link #MODE_STATIC}.
+ * Writes the audio data to the audio sink for playback in streaming mode on a HW_AV_SYNC track.
+ * The blocking behavior will depend on the write mode.
+ * @param audioData the buffer that holds the data to play, starting at the position reported
+ * by <code>audioData.position()</code>.
+ * <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
+ * have been advanced to reflect the amount of data that was successfully written to
+ * the AudioTrack.
+ * @param sizeInBytes number of bytes to write.
+ * <BR>Note this may differ from <code>audioData.remaining()</code>, but cannot exceed it.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}.
+ * <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @param timestamp The timestamp of the first decodable audio frame in the provided audioData.
+ * @return zero or a positive number of bytes that were written, or
+ * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * The dead object error code is not returned if some data was successfully transferred.
+ * In this case, the error is returned at the next write().
+ */
+ public int write(ByteBuffer audioData, int sizeInBytes,
+ @WriteMode int writeMode, long timestamp) {
+
+ if ((mAttributes.getFlags() & AudioAttributes.FLAG_HW_AV_SYNC) == 0) {
+ Log.d(TAG, "AudioTrack.write() called on a regular AudioTrack. Ignoring pts...");
+ return write(audioData, sizeInBytes, writeMode);
+ }
+
+ if ((audioData == null) || (sizeInBytes < 0) || (sizeInBytes > audioData.remaining())) {
+ Log.e(TAG, "AudioTrack.write() called with invalid size (" + sizeInBytes + ") value");
+ return ERROR_BAD_VALUE;
+ }
+
+ // create timestamp header if none exists
+ if (mAvSyncHeader == null) {
+ mAvSyncHeader = ByteBuffer.allocate(16);
+ mAvSyncHeader.order(ByteOrder.BIG_ENDIAN);
+ mAvSyncHeader.putInt(0x55550001);
+ mAvSyncHeader.putInt(sizeInBytes);
+ mAvSyncHeader.putLong(timestamp);
+ mAvSyncHeader.position(0);
+ mAvSyncBytesRemaining = sizeInBytes;
+ }
+
+ // write timestamp header if not completely written already
+ int ret = 0;
+ if (mAvSyncHeader.remaining() != 0) {
+ ret = write(mAvSyncHeader, mAvSyncHeader.remaining(), writeMode);
+ if (ret < 0) {
+ Log.e(TAG, "AudioTrack.write() could not write timestamp header!");
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
+ return ret;
+ }
+ if (mAvSyncHeader.remaining() > 0) {
+ Log.v(TAG, "AudioTrack.write() partial timestamp header written.");
+ return 0;
+ }
+ }
+
+ // write audio data
+ int sizeToWrite = Math.min(mAvSyncBytesRemaining, sizeInBytes);
+ ret = write(audioData, sizeToWrite, writeMode);
+ if (ret < 0) {
+ Log.e(TAG, "AudioTrack.write() could not write audio data!");
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
+ return ret;
+ }
+
+ mAvSyncBytesRemaining -= ret;
+ if (mAvSyncBytesRemaining == 0) {
+ mAvSyncHeader = null;
+ }
+
+ return ret;
+ }
+
+
+ /**
+ * Sets the playback head position within the static buffer to zero,
+ * that is it rewinds to start of static buffer.
+ * The track must be stopped or paused, and
+ * the track's creation mode must be {@link #MODE_STATIC}.
+ * <p>
+ * As of {@link android.os.Build.VERSION_CODES#MNC}, also resets the value returned by
+ * {@link #getPlaybackHeadPosition()} to zero.
+ * For earlier API levels, the reset behavior is unspecified.
+ * <p>
+ * Use {@link #setPlaybackHeadPosition(int)} with a zero position
+ * if the reset of <code>getPlaybackHeadPosition()</code> is not needed.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -1520,6 +2129,144 @@ public class AudioTrack
return err == 0 ? SUCCESS : ERROR;
}
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
+ private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the output from this AudioTrack.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio sink.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio output device.
+ */
+ public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
+ // Do some validation....
+ if (deviceInfo != null && !deviceInfo.isSink()) {
+ return false;
+ }
+ int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
+ boolean status = native_setOutputDevice(preferredDeviceId);
+ if (status == true) {
+ synchronized (this) {
+ mPreferredDevice = deviceInfo;
+ }
+ }
+ return status;
+ }
+
+ /**
+ * Returns the selected output specified by {@link #setPreferredDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for playback.
+ */
+ public AudioDeviceInfo getPreferredDevice() {
+ synchronized (this) {
+ return mPreferredDevice;
+ }
+ }
+
+ //--------------------------------------------------------------------------
+ // (Re)Routing Info
+ //--------------------
+ /**
+ * Defines the interface by which applications can receive notifications of routing
+ * changes for the associated {@link AudioTrack}.
+ */
+ public interface OnRoutingChangedListener {
+ /**
+ * Called when the routing of an AudioTrack changes from either an explicit or
+ * policy rerouting. Use {@link #getRoutedDevice()} to retrieve the newly routed-to
+ * device.
+ */
+ public void onRoutingChanged(AudioTrack audioTrack);
+ }
+
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioTrack.
+ * Note: The query is only valid if the AudioTrack is currently playing. If it is not,
+ * <code>getRoutedDevice()</code> will return null.
+ */
+ public AudioDeviceInfo getRoutedDevice() {
+ int deviceId = native_getRoutedDeviceId();
+ if (deviceId == 0) {
+ return null;
+ }
+ AudioDeviceInfo[] devices =
+ AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_OUTPUTS);
+ for (int i = 0; i < devices.length; i++) {
+ if (devices[i].getId() == deviceId) {
+ return devices[i];
+ }
+ }
+ return null;
+ }
+
+ /**
+ * The list of AudioTrack.OnRoutingChangedListener interfaces added (with
+ * {@link AudioTrack#addOnRoutingChangedListener(OnRoutingChangedListener, android.os.Handler)}
+ * by an app to receive (re)routing notifications.
+ */
+ private ArrayMap<OnRoutingChangedListener, NativeRoutingEventHandlerDelegate>
+ mRoutingChangeListeners =
+ new ArrayMap<OnRoutingChangedListener, NativeRoutingEventHandlerDelegate>();
+
+ /**
+ * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
+ * on this AudioTrack.
+ * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
+ * of rerouting events.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the {@link Handler} associated with the main
+ * {@link Looper} will be used.
+ */
+ public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
+ android.os.Handler handler) {
+ if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.size() == 0) {
+ native_enableDeviceCallback();
+ }
+ mRoutingChangeListeners.put(
+ listener, new NativeRoutingEventHandlerDelegate(this, listener, handler));
+ }
+ }
+ }
+
+ /**
+ * Removes an {@link OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
+ */
+ public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.containsKey(listener)) {
+ mRoutingChangeListeners.remove(listener);
+ }
+ if (mRoutingChangeListeners.size() == 0) {
+ native_disableDeviceCallback();
+ }
+ }
+ }
+
+ /**
+ * Sends device list change notification to all listeners.
+ */
+ private void broadcastRoutingChange() {
+ Collection<NativeRoutingEventHandlerDelegate> values;
+ synchronized (mRoutingChangeListeners) {
+ values = mRoutingChangeListeners.values();
+ }
+ AudioManager.resetAudioPortGeneration();
+ for(NativeRoutingEventHandlerDelegate delegate : values) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
+ }
+ }
+ }
+
//---------------------------------------------------------
// Interface definitions
//--------------------
@@ -1548,10 +2295,10 @@ public class AudioTrack
* Helper class to handle the forwarding of native events to the appropriate listener
* (potentially) handled in a different thread
*/
- private class NativeEventHandlerDelegate {
+ private class NativePositionEventHandlerDelegate {
private final Handler mHandler;
- NativeEventHandlerDelegate(final AudioTrack track,
+ NativePositionEventHandlerDelegate(final AudioTrack track,
final OnPlaybackPositionUpdateListener listener,
Handler handler) {
// find the looper for our new event handler
@@ -1599,6 +2346,55 @@ public class AudioTrack
}
}
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread
+ */
+ private class NativeRoutingEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativeRoutingEventHandlerDelegate(final AudioTrack track,
+ final OnRoutingChangedListener listener,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no given handler, use the looper the AudioTrack was created in
+ looper = mInitializationLooper;
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ if (track == null) {
+ return;
+ }
+ switch(msg.what) {
+ case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
+ if (listener != null) {
+ listener.onRoutingChanged(track);
+ }
+ break;
+ default:
+ loge("Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
+ }
+ }
//---------------------------------------------------------
// Java methods called from the native side
@@ -1612,7 +2408,11 @@ public class AudioTrack
return;
}
- NativeEventHandlerDelegate delegate = track.mEventHandlerDelegate;
+ if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) {
+ track.broadcastRoutingChange();
+ return;
+ }
+ NativePositionEventHandlerDelegate delegate = track.mEventHandlerDelegate;
if (delegate != null) {
Handler handler = delegate.getHandler();
if (handler != null) {
@@ -1620,7 +2420,6 @@ public class AudioTrack
handler.sendMessage(m);
}
}
-
}
@@ -1633,7 +2432,7 @@ public class AudioTrack
// AudioAttributes.USAGE_MEDIA will map to AudioManager.STREAM_MUSIC
private native final int native_setup(Object /*WeakReference<AudioTrack>*/ audiotrack_this,
Object /*AudioAttributes*/ attributes,
- int sampleRate, int channelMask, int audioFormat,
+ int sampleRate, int channelMask, int channelIndexMask, int audioFormat,
int buffSizeInBytes, int mode, int[] sessionId);
private native final void native_finalize();
@@ -1653,7 +2452,8 @@ public class AudioTrack
boolean isBlocking);
private native final int native_write_short(short[] audioData,
- int offsetInShorts, int sizeInShorts, int format);
+ int offsetInShorts, int sizeInShorts, int format,
+ boolean isBlocking);
private native final int native_write_float(float[] audioData,
int offsetInFloats, int sizeInFloats, int format,
@@ -1671,6 +2471,9 @@ public class AudioTrack
private native final int native_set_playback_rate(int sampleRateInHz);
private native final int native_get_playback_rate();
+ private native final void native_set_playback_params(@NonNull PlaybackParams params);
+ private native final @NonNull PlaybackParams native_get_playback_params();
+
private native final int native_set_marker_pos(int marker);
private native final int native_get_marker_pos();
@@ -1696,6 +2499,11 @@ public class AudioTrack
private native final int native_attachAuxEffect(int effectId);
private native final int native_setAuxEffectSendLevel(float level);
+ private native final boolean native_setOutputDevice(int deviceId);
+ private native final int native_getRoutedDeviceId();
+ private native final void native_enableDeviceCallback();
+ private native final void native_disableDeviceCallback();
+
//---------------------------------------------------------
// Utility methods
//------------------
@@ -1707,5 +2515,4 @@ public class AudioTrack
private static void loge(String msg) {
Log.e(TAG, msg);
}
-
}
diff --git a/media/java/android/media/ClosedCaptionRenderer.java b/media/java/android/media/ClosedCaptionRenderer.java
index d34b21b..e3680e9 100644
--- a/media/java/android/media/ClosedCaptionRenderer.java
+++ b/media/java/android/media/ClosedCaptionRenderer.java
@@ -154,6 +154,7 @@ class CCParser {
private int mMode = MODE_PAINT_ON;
private int mRollUpSize = 4;
+ private int mPrevCtrlCode = INVALID;
private CCMemory mDisplay = new CCMemory();
private CCMemory mNonDisplay = new CCMemory();
@@ -260,6 +261,13 @@ class CCParser {
private boolean handleCtrlCode(CCData ccData) {
int ctrlCode = ccData.getCtrlCode();
+
+ if (mPrevCtrlCode != INVALID && mPrevCtrlCode == ctrlCode) {
+ // discard double ctrl codes (but if there's a 3rd one, we still take that)
+ mPrevCtrlCode = INVALID;
+ return true;
+ }
+
switch(ctrlCode) {
case RCL:
// select pop-on style
@@ -325,10 +333,12 @@ class CCParser {
break;
case INVALID:
default:
- // not handled
+ mPrevCtrlCode = INVALID;
return false;
}
+ mPrevCtrlCode = ctrlCode;
+
// handled
return true;
}
diff --git a/media/java/android/media/DataSource.java b/media/java/android/media/DataSource.java
deleted file mode 100644
index 347bd5f..0000000
--- a/media/java/android/media/DataSource.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package android.media;
-
-import java.io.Closeable;
-
-/**
- * An abstraction for a media data source, e.g. a file or an http stream
- * {@hide}
- */
-public interface DataSource extends Closeable {
- /**
- * Reads data from the data source at the requested position
- *
- * @param offset where in the source to read
- * @param buffer the buffer to read the data into
- * @param size how many bytes to read
- * @return the number of bytes read, or -1 if there was an error
- */
- public int readAt(long offset, byte[] buffer, int size);
-
- /**
- * Gets the size of the data source.
- *
- * @return size of data source, or -1 if the length is unknown
- */
- public long getSize();
-}
diff --git a/media/java/android/media/FocusRequester.java b/media/java/android/media/FocusRequester.java
deleted file mode 100644
index bbe5fd2..0000000
--- a/media/java/android/media/FocusRequester.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.annotation.NonNull;
-import android.media.MediaFocusControl.AudioFocusDeathHandler;
-import android.os.IBinder;
-import android.util.Log;
-
-import java.io.PrintWriter;
-
-/**
- * @hide
- * Class to handle all the information about a user of audio focus. The lifecycle of each
- * instance is managed by android.media.MediaFocusControl, from its addition to the audio focus
- * stack to its release.
- */
-class FocusRequester {
-
- // on purpose not using this classe's name, as it will only be used from MediaFocusControl
- private static final String TAG = "MediaFocusControl";
- private static final boolean DEBUG = false;
-
- private AudioFocusDeathHandler mDeathHandler;
- private final IAudioFocusDispatcher mFocusDispatcher; // may be null
- private final IBinder mSourceRef;
- private final String mClientId;
- private final String mPackageName;
- private final int mCallingUid;
- private final MediaFocusControl mFocusController; // never null
- /**
- * the audio focus gain request that caused the addition of this object in the focus stack.
- */
- private final int mFocusGainRequest;
- /**
- * the flags associated with the gain request that qualify the type of grant (e.g. accepting
- * delay vs grant must be immediate)
- */
- private final int mGrantFlags;
- /**
- * the audio focus loss received my mFocusDispatcher, is AudioManager.AUDIOFOCUS_NONE if
- * it never lost focus.
- */
- private int mFocusLossReceived;
- /**
- * the audio attributes associated with the focus request
- */
- private final AudioAttributes mAttributes;
-
- /**
- * Class constructor
- * @param aa
- * @param focusRequest
- * @param grantFlags
- * @param afl
- * @param source
- * @param id
- * @param hdlr
- * @param pn
- * @param uid
- * @param ctlr cannot be null
- */
- FocusRequester(AudioAttributes aa, int focusRequest, int grantFlags,
- IAudioFocusDispatcher afl, IBinder source, String id, AudioFocusDeathHandler hdlr,
- String pn, int uid, @NonNull MediaFocusControl ctlr) {
- mAttributes = aa;
- mFocusDispatcher = afl;
- mSourceRef = source;
- mClientId = id;
- mDeathHandler = hdlr;
- mPackageName = pn;
- mCallingUid = uid;
- mFocusGainRequest = focusRequest;
- mGrantFlags = grantFlags;
- mFocusLossReceived = AudioManager.AUDIOFOCUS_NONE;
- mFocusController = ctlr;
- }
-
-
- boolean hasSameClient(String otherClient) {
- try {
- return mClientId.compareTo(otherClient) == 0;
- } catch (NullPointerException e) {
- return false;
- }
- }
-
- boolean isLockedFocusOwner() {
- return ((mGrantFlags & AudioManager.AUDIOFOCUS_FLAG_LOCK) != 0);
- }
-
- boolean hasSameBinder(IBinder ib) {
- return (mSourceRef != null) && mSourceRef.equals(ib);
- }
-
- boolean hasSamePackage(String pack) {
- try {
- return mPackageName.compareTo(pack) == 0;
- } catch (NullPointerException e) {
- return false;
- }
- }
-
- boolean hasSameUid(int uid) {
- return mCallingUid == uid;
- }
-
- String getClientId() {
- return mClientId;
- }
-
- int getGainRequest() {
- return mFocusGainRequest;
- }
-
- int getGrantFlags() {
- return mGrantFlags;
- }
-
- AudioAttributes getAudioAttributes() {
- return mAttributes;
- }
-
-
- private static String focusChangeToString(int focus) {
- switch(focus) {
- case AudioManager.AUDIOFOCUS_NONE:
- return "none";
- case AudioManager.AUDIOFOCUS_GAIN:
- return "GAIN";
- case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
- return "GAIN_TRANSIENT";
- case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
- return "GAIN_TRANSIENT_MAY_DUCK";
- case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
- return "GAIN_TRANSIENT_EXCLUSIVE";
- case AudioManager.AUDIOFOCUS_LOSS:
- return "LOSS";
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
- return "LOSS_TRANSIENT";
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
- return "LOSS_TRANSIENT_CAN_DUCK";
- default:
- return "[invalid focus change" + focus + "]";
- }
- }
-
- private String focusGainToString() {
- return focusChangeToString(mFocusGainRequest);
- }
-
- private String focusLossToString() {
- return focusChangeToString(mFocusLossReceived);
- }
-
- private static String flagsToString(int flags) {
- String msg = new String();
- if ((flags & AudioManager.AUDIOFOCUS_FLAG_DELAY_OK) != 0) {
- msg += "DELAY_OK";
- }
- if ((flags & AudioManager.AUDIOFOCUS_FLAG_LOCK) != 0) {
- if (!msg.isEmpty()) { msg += "|"; }
- msg += "LOCK";
- }
- if ((flags & AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS) != 0) {
- if (!msg.isEmpty()) { msg += "|"; }
- msg += "PAUSES_ON_DUCKABLE_LOSS";
- }
- return msg;
- }
-
- void dump(PrintWriter pw) {
- pw.println(" source:" + mSourceRef
- + " -- pack: " + mPackageName
- + " -- client: " + mClientId
- + " -- gain: " + focusGainToString()
- + " -- flags: " + flagsToString(mGrantFlags)
- + " -- loss: " + focusLossToString()
- + " -- uid: " + mCallingUid
- + " -- attr: " + mAttributes);
- }
-
-
- void release() {
- try {
- if (mSourceRef != null && mDeathHandler != null) {
- mSourceRef.unlinkToDeath(mDeathHandler, 0);
- mDeathHandler = null;
- }
- } catch (java.util.NoSuchElementException e) {
- Log.e(TAG, "FocusRequester.release() hit ", e);
- }
- }
-
- @Override
- protected void finalize() throws Throwable {
- release();
- super.finalize();
- }
-
- /**
- * For a given audio focus gain request, return the audio focus loss type that will result
- * from it, taking into account any previous focus loss.
- * @param gainRequest
- * @return the audio focus loss type that matches the gain request
- */
- private int focusLossForGainRequest(int gainRequest) {
- switch(gainRequest) {
- case AudioManager.AUDIOFOCUS_GAIN:
- switch(mFocusLossReceived) {
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
- case AudioManager.AUDIOFOCUS_LOSS:
- case AudioManager.AUDIOFOCUS_NONE:
- return AudioManager.AUDIOFOCUS_LOSS;
- }
- case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
- case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
- switch(mFocusLossReceived) {
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
- case AudioManager.AUDIOFOCUS_NONE:
- return AudioManager.AUDIOFOCUS_LOSS_TRANSIENT;
- case AudioManager.AUDIOFOCUS_LOSS:
- return AudioManager.AUDIOFOCUS_LOSS;
- }
- case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
- switch(mFocusLossReceived) {
- case AudioManager.AUDIOFOCUS_NONE:
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
- return AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK;
- case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
- return AudioManager.AUDIOFOCUS_LOSS_TRANSIENT;
- case AudioManager.AUDIOFOCUS_LOSS:
- return AudioManager.AUDIOFOCUS_LOSS;
- }
- default:
- Log.e(TAG, "focusLossForGainRequest() for invalid focus request "+ gainRequest);
- return AudioManager.AUDIOFOCUS_NONE;
- }
- }
-
- /**
- * Called synchronized on MediaFocusControl.mAudioFocusLock
- */
- void handleExternalFocusGain(int focusGain) {
- int focusLoss = focusLossForGainRequest(focusGain);
- handleFocusLoss(focusLoss);
- }
-
- /**
- * Called synchronized on MediaFocusControl.mAudioFocusLock
- */
- void handleFocusGain(int focusGain) {
- try {
- mFocusLossReceived = AudioManager.AUDIOFOCUS_NONE;
- mFocusController.notifyExtPolicyFocusGrant_syncAf(toAudioFocusInfo(),
- AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
- if (mFocusDispatcher != null) {
- if (DEBUG) {
- Log.v(TAG, "dispatching " + focusChangeToString(focusGain) + " to "
- + mClientId);
- }
- mFocusDispatcher.dispatchAudioFocusChange(focusGain, mClientId);
- }
- } catch (android.os.RemoteException e) {
- Log.e(TAG, "Failure to signal gain of audio focus due to: ", e);
- }
- }
-
- /**
- * Called synchronized on MediaFocusControl.mAudioFocusLock
- */
- void handleFocusLoss(int focusLoss) {
- try {
- if (focusLoss != mFocusLossReceived) {
- mFocusLossReceived = focusLoss;
- // before dispatching a focus loss, check if the following conditions are met:
- // 1/ the framework is not supposed to notify the focus loser on a DUCK loss
- // 2/ it is a DUCK loss
- // 3/ the focus loser isn't flagged as pausing in a DUCK loss
- // if they are, do not notify the focus loser
- if (!mFocusController.mustNotifyFocusOwnerOnDuck()
- && mFocusLossReceived == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK
- && (mGrantFlags
- & AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS) == 0) {
- if (DEBUG) {
- Log.v(TAG, "NOT dispatching " + focusChangeToString(mFocusLossReceived)
- + " to " + mClientId + ", to be handled externally");
- }
- mFocusController.notifyExtPolicyFocusLoss_syncAf(
- toAudioFocusInfo(), false /* wasDispatched */);
- return;
- }
- if (mFocusDispatcher != null) {
- if (DEBUG) {
- Log.v(TAG, "dispatching " + focusChangeToString(mFocusLossReceived) + " to "
- + mClientId);
- }
- mFocusController.notifyExtPolicyFocusLoss_syncAf(
- toAudioFocusInfo(), true /* wasDispatched */);
- mFocusDispatcher.dispatchAudioFocusChange(mFocusLossReceived, mClientId);
- }
- }
- } catch (android.os.RemoteException e) {
- Log.e(TAG, "Failure to signal loss of audio focus due to:", e);
- }
- }
-
- AudioFocusInfo toAudioFocusInfo() {
- return new AudioFocusInfo(mAttributes, mClientId, mPackageName,
- mFocusGainRequest, mFocusLossReceived, mGrantFlags);
- }
-}
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index fad3cec..8e96218 100644
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -29,6 +29,7 @@ import android.media.IRemoteVolumeObserver;
import android.media.IRingtonePlayer;
import android.media.IVolumeController;
import android.media.Rating;
+import android.media.VolumePolicy;
import android.media.audiopolicy.AudioPolicyConfig;
import android.media.audiopolicy.IAudioPolicyCallback;
import android.net.Uri;
@@ -40,42 +41,30 @@ import android.view.KeyEvent;
interface IAudioService {
void adjustSuggestedStreamVolume(int direction, int suggestedStreamType, int flags,
- String callingPackage);
+ String callingPackage, String caller);
void adjustStreamVolume(int streamType, int direction, int flags, String callingPackage);
- void adjustMasterVolume(int direction, int flags, String callingPackage);
-
void setStreamVolume(int streamType, int index, int flags, String callingPackage);
oneway void setRemoteStreamVolume(int index);
- void setMasterVolume(int index, int flags, String callingPackage);
-
- void setStreamSolo(int streamType, boolean state, IBinder cb);
-
- void setStreamMute(int streamType, boolean state, IBinder cb);
-
boolean isStreamMute(int streamType);
void forceRemoteSubmixFullVolume(boolean startForcing, IBinder cb);
- void setMasterMute(boolean state, int flags, String callingPackage, IBinder cb);
-
boolean isMasterMute();
+ void setMasterMute(boolean mute, int flags, String callingPackage);
+
int getStreamVolume(int streamType);
- int getMasterVolume();
+ int getStreamMinVolume(int streamType);
int getStreamMaxVolume(int streamType);
- int getMasterMaxVolume();
-
int getLastAudibleStreamVolume(int streamType);
- int getLastAudibleMasterVolume();
-
void setMicrophoneMute(boolean on, String callingPackage);
void setRingerModeExternal(int ringerMode, String caller);
@@ -94,7 +83,7 @@ interface IAudioService {
boolean shouldVibrate(int vibrateType);
- void setMode(int mode, IBinder cb);
+ void setMode(int mode, IBinder cb, String callingPackage);
int getMode();
@@ -193,9 +182,11 @@ interface IAudioService {
void setRingtonePlayer(IRingtonePlayer player);
IRingtonePlayer getRingtonePlayer();
- int getMasterStreamType();
+ int getUiSoundsStreamType();
+
+ void setWiredDeviceConnectionState(int type, int state, String address, String name,
+ String caller);
- void setWiredDeviceConnectionState(int device, int state, String name);
int setBluetoothA2dpDeviceConnectionState(in BluetoothDevice device, int state, int profile);
AudioRoutesInfo startWatchingRoutes(in IAudioRoutesObserver observer);
@@ -208,15 +199,20 @@ interface IAudioService {
boolean isStreamAffectedByRingerMode(int streamType);
- void disableSafeMediaVolume();
+ boolean isStreamAffectedByMute(int streamType);
+
+ void disableSafeMediaVolume(String callingPackage);
int setHdmiSystemAudioSupported(boolean on);
boolean isHdmiSystemAudioSupported();
- String registerAudioPolicy(in AudioPolicyConfig policyConfig,
- in IAudioPolicyCallback pcb, boolean hasFocusListener);
+ String registerAudioPolicy(in AudioPolicyConfig policyConfig,
+ in IAudioPolicyCallback pcb, boolean hasFocusListener);
+
oneway void unregisterAudioPolicyAsync(in IAudioPolicyCallback pcb);
- int setFocusPropertiesForPolicy(int duckingBehavior, in IAudioPolicyCallback pcb);
+ int setFocusPropertiesForPolicy(int duckingBehavior, in IAudioPolicyCallback pcb);
+
+ void setVolumePolicy(in VolumePolicy policy);
}
diff --git a/media/java/android/media/IVolumeController.aidl b/media/java/android/media/IVolumeController.aidl
index e3593a6..90ac416 100644
--- a/media/java/android/media/IVolumeController.aidl
+++ b/media/java/android/media/IVolumeController.aidl
@@ -27,8 +27,6 @@ oneway interface IVolumeController {
void volumeChanged(int streamType, int flags);
- void masterVolumeChanged(int flags);
-
void masterMuteChanged(int flags);
void setLayoutDirection(int layoutDirection);
diff --git a/media/java/android/media/Image.java b/media/java/android/media/Image.java
index 53ab264..e18e9a3 100644
--- a/media/java/android/media/Image.java
+++ b/media/java/android/media/Image.java
@@ -50,10 +50,25 @@ public abstract class Image implements AutoCloseable {
/**
* @hide
*/
+ protected boolean mIsImageValid = false;
+
+ /**
+ * @hide
+ */
protected Image() {
}
/**
+ * Throw IllegalStateException if the image is invalid (already closed).
+ *
+ * @hide
+ */
+ protected void throwISEIfImageIsInvalid() {
+ if (!mIsImageValid) {
+ throw new IllegalStateException("Image is already closed");
+ }
+ }
+ /**
* Get the format for this image. This format determines the number of
* ByteBuffers needed to represent the image, and the general layout of the
* pixel data in each in ByteBuffer.
@@ -86,6 +101,38 @@ public abstract class Image implements AutoCloseable {
* Each plane has its own row stride and pixel stride.</td>
* </tr>
* <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have half the width and the full height of the luminance
+ * plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have the same width and height as that of the luminance
+ * plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
+ * <td>3</td>
+ * <td>A R (red) plane followed by the G (green) and B (blue) planes.
+ * All planes have the same widths and heights.
+ * Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
+ * <td>4</td>
+ * <td>A R (red) plane followed by the G (green), B (blue), and
+ * A (alpha) planes. All planes have the same widths and heights.
+ * Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
* <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
* <td>1</td>
* <td>A single plane of raw sensor image data, with 16 bits per color
@@ -115,14 +162,43 @@ public abstract class Image implements AutoCloseable {
/**
* Get the timestamp associated with this frame.
* <p>
- * The timestamp is measured in nanoseconds, and is monotonically
- * increasing. However, the zero point and whether the timestamp can be
- * compared against other sources of time or images depend on the source of
- * this image.
+ * The timestamp is measured in nanoseconds, and is normally monotonically
+ * increasing. The timestamps for the images from different sources may have
+ * different timebases therefore may not be comparable. The specific meaning and
+ * timebase of the timestamp depend on the source providing images. See
+ * {@link android.hardware.Camera Camera},
+ * {@link android.hardware.camera2.CameraDevice CameraDevice},
+ * {@link MediaPlayer} and {@link MediaCodec} for more details.
* </p>
*/
public abstract long getTimestamp();
+ /**
+ * Set the timestamp associated with this frame.
+ * <p>
+ * The timestamp is measured in nanoseconds, and is normally monotonically
+ * increasing. The timestamps for the images from different sources may have
+ * different timebases therefore may not be comparable. The specific meaning and
+ * timebase of the timestamp depend on the source providing images. See
+ * {@link android.hardware.Camera Camera},
+ * {@link android.hardware.camera2.CameraDevice CameraDevice},
+ * {@link MediaPlayer} and {@link MediaCodec} for more details.
+ * </p>
+ * <p>
+ * For images dequeued from {@link ImageWriter} via
+ * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
+ * set the timestamps correctly before sending them back to the
+ * {@link ImageWriter}, or the timestamp will be generated automatically when
+ * {@link ImageWriter#queueInputImage queueInputImage()} is called.
+ * </p>
+ *
+ * @param timestamp The timestamp to be set for this image.
+ */
+ public void setTimestamp(long timestamp) {
+ throwISEIfImageIsInvalid();
+ return;
+ }
+
private Rect mCropRect;
/**
@@ -132,6 +208,8 @@ public abstract class Image implements AutoCloseable {
* using coordinates in the largest-resolution plane.
*/
public Rect getCropRect() {
+ throwISEIfImageIsInvalid();
+
if (mCropRect == null) {
return new Rect(0, 0, getWidth(), getHeight());
} else {
@@ -146,6 +224,8 @@ public abstract class Image implements AutoCloseable {
* using coordinates in the largest-resolution plane.
*/
public void setCropRect(Rect cropRect) {
+ throwISEIfImageIsInvalid();
+
if (cropRect != null) {
cropRect = new Rect(cropRect); // make a copy
cropRect.intersect(0, 0, getWidth(), getHeight());
@@ -155,7 +235,11 @@ public abstract class Image implements AutoCloseable {
/**
* Get the array of pixel planes for this Image. The number of planes is
- * determined by the format of the Image.
+ * determined by the format of the Image. The application will get an empty
+ * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
+ * PRIVATE}, because the image pixel data is not directly accessible. The
+ * application can check the image format by calling
+ * {@link Image#getFormat()}.
*/
public abstract Plane[] getPlanes();
@@ -164,14 +248,76 @@ public abstract class Image implements AutoCloseable {
* <p>
* After calling this method, calling any methods on this {@code Image} will
* result in an {@link IllegalStateException}, and attempting to read from
- * {@link ByteBuffer ByteBuffers} returned by an earlier
- * {@link Plane#getBuffer} call will have undefined behavior.
+ * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Plane#getBuffer} call will have undefined behavior. If the image
+ * was obtained from {@link ImageWriter} via
+ * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
+ * image data filled by the application will be lost and the image will be
+ * returned to {@link ImageWriter} for reuse. Images given to
+ * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
+ * closed.
* </p>
*/
@Override
public abstract void close();
/**
+ * <p>
+ * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
+ * </p>
+ * <p>
+ * This is a package private method that is only used internally.
+ * </p>
+ *
+ * @return true if the image is attachable to a new owner, false if the image is still attached
+ * to its current owner, or the image is a stand-alone image and is not attachable to
+ * a new owner.
+ */
+ boolean isAttachable() {
+ throwISEIfImageIsInvalid();
+
+ return false;
+ }
+
+ /**
+ * <p>
+ * Get the owner of the {@link Image}.
+ * </p>
+ * <p>
+ * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
+ * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
+ * if the image is stand-alone image or the owner is unknown.
+ * </p>
+ * <p>
+ * This is a package private method that is only used internally.
+ * </p>
+ *
+ * @return The owner of the Image.
+ */
+ Object getOwner() {
+ throwISEIfImageIsInvalid();
+
+ return null;
+ }
+
+ /**
+ * Get native context (buffer pointer) associated with this image.
+ * <p>
+ * This is a package private method that is only used internally. It can be
+ * used to get the native buffer pointer and passed to native, which may be
+ * passed to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
+ * JNI call.
+ * </p>
+ *
+ * @return native context associated with this Image.
+ */
+ long getNativeContext() {
+ throwISEIfImageIsInvalid();
+
+ return 0;
+ }
+
+ /**
* <p>A single color plane of image data.</p>
*
* <p>The number and meaning of the planes in an Image are determined by the
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index 8d6a588..c97de5d 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -23,10 +23,13 @@ import android.os.Looper;
import android.os.Message;
import android.view.Surface;
+import dalvik.system.VMRuntime;
+
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.NioUtils;
+import java.util.concurrent.atomic.AtomicBoolean;
/**
* <p>The ImageReader class allows direct application access to image data
@@ -34,7 +37,7 @@ import java.nio.NioUtils;
*
* <p>Several Android media API classes accept Surface objects as targets to
* render to, including {@link MediaPlayer}, {@link MediaCodec},
- * {@link android.hardware.camera2.CameraDevice}, and
+ * {@link android.hardware.camera2.CameraDevice}, {@link ImageWriter} and
* {@link android.renderscript.Allocation RenderScript Allocations}. The image
* sizes and formats that can be used with each source vary, and should be
* checked in the documentation for the specific API.</p>
@@ -67,33 +70,52 @@ public class ImageReader implements AutoCloseable {
private static final int ACQUIRE_MAX_IMAGES = 2;
/**
- * <p>Create a new reader for images of the desired size and format.</p>
- *
- * <p>The {@code maxImages} parameter determines the maximum number of {@link Image}
- * objects that can be be acquired from the {@code ImageReader}
- * simultaneously. Requesting more buffers will use up more memory, so it is
- * important to use only the minimum number necessary for the use case.</p>
- *
- * <p>The valid sizes and formats depend on the source of the image
- * data.</p>
+ * <p>
+ * Create a new reader for images of the desired size and format.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+ * {@link Image} objects that can be be acquired from the
+ * {@code ImageReader} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary
+ * for the use case.
+ * </p>
+ * <p>
+ * The valid sizes and formats depend on the source of the image data.
+ * </p>
+ * <p>
+ * If the {@code format} is {@link ImageFormat#PRIVATE PRIVATE}, the created
+ * {@link ImageReader} will produce images that are not directly accessible
+ * by the application. The application can still acquire images from this
+ * {@link ImageReader}, and send them to the
+ * {@link android.hardware.camera2.CameraDevice camera} for reprocessing via
+ * {@link ImageWriter} interface. However, the {@link Image#getPlanes()
+ * getPlanes()} will return an empty array for {@link ImageFormat#PRIVATE
+ * PRIVATE} format images. The application can check if an existing reader's
+ * format by calling {@link #getImageFormat()}.
+ * </p>
+ * <p>
+ * {@link ImageFormat#PRIVATE PRIVATE} format {@link ImageReader
+ * ImageReaders} are more efficient to use when application access to image
+ * data is not necessary, compared to ImageReaders using other format such
+ * as {@link ImageFormat#YUV_420_888 YUV_420_888}.
+ * </p>
*
- * @param width
- * The default width in pixels of the Images that this reader will produce.
- * @param height
- * The default height in pixels of the Images that this reader will produce.
- * @param format
- * The format of the Image that this reader will produce. This
+ * @param width The default width in pixels of the Images that this reader
+ * will produce.
+ * @param height The default height in pixels of the Images that this reader
+ * will produce.
+ * @param format The format of the Image that this reader will produce. This
* must be one of the {@link android.graphics.ImageFormat} or
- * {@link android.graphics.PixelFormat} constants. Note that
- * not all formats is supported, like ImageFormat.NV21.
- * @param maxImages
- * The maximum number of images the user will want to
- * access simultaneously. This should be as small as possible to limit
- * memory use. Once maxImages Images are obtained by the user, one of them
- * has to be released before a new Image will become available for access
- * through {@link #acquireLatestImage()} or {@link #acquireNextImage()}.
+ * {@link android.graphics.PixelFormat} constants. Note that not
+ * all formats are supported, like ImageFormat.NV21.
+ * @param maxImages The maximum number of images the user will want to
+ * access simultaneously. This should be as small as possible to
+ * limit memory use. Once maxImages Images are obtained by the
+ * user, one of them has to be released before a new Image will
+ * become available for access through
+ * {@link #acquireLatestImage()} or {@link #acquireNextImage()}.
* Must be greater than 0.
- *
* @see Image
*/
public static ImageReader newInstance(int width, int height, int format, int maxImages) {
@@ -128,6 +150,13 @@ public class ImageReader implements AutoCloseable {
nativeInit(new WeakReference<ImageReader>(this), width, height, format, maxImages);
mSurface = nativeGetSurface();
+
+ // Estimate the native buffer allocation size and register it so it gets accounted for
+ // during GC. Note that this doesn't include the buffers required by the buffer queue
+ // itself and the buffers requested by the producer.
+ mEstimatedNativeAllocBytes = ImageUtils.getEstimatedNativeAllocBytes(width, height, format,
+ maxImages);
+ VMRuntime.getRuntime().registerNativeAllocation(mEstimatedNativeAllocBytes);
}
/**
@@ -275,7 +304,7 @@ public class ImageReader implements AutoCloseable {
* @hide
*/
public Image acquireNextImageNoThrowISE() {
- SurfaceImage si = new SurfaceImage();
+ SurfaceImage si = new SurfaceImage(mFormat);
return acquireNextSurfaceImage(si) == ACQUIRE_SUCCESS ? si : null;
}
@@ -300,7 +329,7 @@ public class ImageReader implements AutoCloseable {
switch (status) {
case ACQUIRE_SUCCESS:
si.createSurfacePlanes();
- si.setImageValid(true);
+ si.mIsImageValid = true;
case ACQUIRE_NO_BUFS:
case ACQUIRE_MAX_IMAGES:
break;
@@ -340,7 +369,9 @@ public class ImageReader implements AutoCloseable {
* @see #acquireLatestImage
*/
public Image acquireNextImage() {
- SurfaceImage si = new SurfaceImage();
+ // Initialize with reader format, but can be overwritten by native if the image
+ // format is different from the reader format.
+ SurfaceImage si = new SurfaceImage(mFormat);
int status = acquireNextSurfaceImage(si);
switch (status) {
@@ -374,7 +405,7 @@ public class ImageReader implements AutoCloseable {
si.clearSurfacePlanes();
nativeReleaseImage(i);
- si.setImageValid(false);
+ si.mIsImageValid = false;
}
/**
@@ -443,7 +474,12 @@ public class ImageReader implements AutoCloseable {
@Override
public void close() {
setOnImageAvailableListener(null, null);
+ if (mSurface != null) mSurface.release();
nativeClose();
+ if (mEstimatedNativeAllocBytes > 0) {
+ VMRuntime.getRuntime().registerNativeFree(mEstimatedNativeAllocBytes);
+ mEstimatedNativeAllocBytes = 0;
+ }
}
@Override
@@ -456,6 +492,57 @@ public class ImageReader implements AutoCloseable {
}
/**
+ * <p>
+ * Remove the ownership of this image from the ImageReader.
+ * </p>
+ * <p>
+ * After this call, the ImageReader no longer owns this image, and the image
+ * ownership can be transfered to another entity like {@link ImageWriter}
+ * via {@link ImageWriter#queueInputImage}. It's up to the new owner to
+ * release the resources held by this image. For example, if the ownership
+ * of this image is transfered to an {@link ImageWriter}, the image will be
+ * freed by the ImageWriter after the image data consumption is done.
+ * </p>
+ * <p>
+ * This method can be used to achieve zero buffer copy for use cases like
+ * {@link android.hardware.camera2.CameraDevice Camera2 API} PRIVATE and YUV
+ * reprocessing, where the application can select an output image from
+ * {@link ImageReader} and transfer this image directly to
+ * {@link ImageWriter}, where this image can be consumed by camera directly.
+ * For PRIVATE reprocessing, this is the only way to send input buffers to
+ * the {@link android.hardware.camera2.CameraDevice camera} for
+ * reprocessing.
+ * </p>
+ * <p>
+ * This is a package private method that is only used internally.
+ * </p>
+ *
+ * @param image The image to be detached from this ImageReader.
+ * @throws IllegalStateException If the ImageReader or image have been
+ * closed, or the has been detached, or has not yet been
+ * acquired.
+ */
+ void detachImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("input image must not be null");
+ }
+ if (!isImageOwnedbyMe(image)) {
+ throw new IllegalArgumentException("Trying to detach an image that is not owned by"
+ + " this ImageReader");
+ }
+
+ SurfaceImage si = (SurfaceImage) image;
+ si.throwISEIfImageIsInvalid();
+
+ if (si.isAttachable()) {
+ throw new IllegalStateException("Image was already detached from this ImageReader");
+ }
+
+ nativeDetachImage(image);
+ si.setDetached(true);
+ }
+
+ /**
* Only a subset of the formats defined in
* {@link android.graphics.ImageFormat ImageFormat} and
* {@link android.graphics.PixelFormat PixelFormat} are supported by
@@ -483,13 +570,25 @@ public class ImageReader implements AutoCloseable {
case ImageFormat.Y16:
case ImageFormat.RAW_SENSOR:
case ImageFormat.RAW10:
+ case ImageFormat.DEPTH16:
+ case ImageFormat.DEPTH_POINT_CLOUD:
return 1;
+ case ImageFormat.PRIVATE:
+ return 0;
default:
throw new UnsupportedOperationException(
String.format("Invalid format specified %d", mFormat));
}
}
+ private boolean isImageOwnedbyMe(Image image) {
+ if (!(image instanceof SurfaceImage)) {
+ return false;
+ }
+ SurfaceImage si = (SurfaceImage) image;
+ return si.getReader() == this;
+ }
+
/**
* Called from Native code when an Event happens.
*
@@ -520,6 +619,7 @@ public class ImageReader implements AutoCloseable {
private final int mMaxImages;
private final int mNumPlanes;
private final Surface mSurface;
+ private int mEstimatedNativeAllocBytes;
private final Object mListenerLock = new Object();
private OnImageAvailableListener mListener;
@@ -551,8 +651,8 @@ public class ImageReader implements AutoCloseable {
}
private class SurfaceImage extends android.media.Image {
- public SurfaceImage() {
- mIsImageValid = false;
+ public SurfaceImage(int format) {
+ mFormat = format;
}
@Override
@@ -568,56 +668,47 @@ public class ImageReader implements AutoCloseable {
@Override
public int getFormat() {
- if (mIsImageValid) {
- return ImageReader.this.mFormat;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ int readerFormat = ImageReader.this.getImageFormat();
+ // Assume opaque reader always produce opaque images.
+ mFormat = (readerFormat == ImageFormat.PRIVATE) ? readerFormat :
+ nativeGetFormat(readerFormat);
+ return mFormat;
}
@Override
public int getWidth() {
- if (mIsImageValid) {
- if (mWidth == -1) {
- mWidth = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getWidth() :
- nativeGetWidth();
- }
- return mWidth;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ mWidth = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getWidth() :
+ nativeGetWidth(mFormat);
+ return mWidth;
}
@Override
public int getHeight() {
- if (mIsImageValid) {
- if (mHeight == -1) {
- mHeight = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getHeight() :
- nativeGetHeight();
- }
- return mHeight;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ mHeight = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getHeight() :
+ nativeGetHeight(mFormat);
+ return mHeight;
}
@Override
public long getTimestamp() {
- if (mIsImageValid) {
- return mTimestamp;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ return mTimestamp;
+ }
+
+ @Override
+ public void setTimestamp(long timestampNs) {
+ throwISEIfImageIsInvalid();
+ mTimestamp = timestampNs;
}
@Override
public Plane[] getPlanes() {
- if (mIsImageValid) {
- // Shallow copy is fine.
- return mPlanes.clone();
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ // Shallow copy is fine.
+ return mPlanes.clone();
}
@Override
@@ -629,12 +720,27 @@ public class ImageReader implements AutoCloseable {
}
}
- private void setImageValid(boolean isValid) {
- mIsImageValid = isValid;
+ @Override
+ boolean isAttachable() {
+ throwISEIfImageIsInvalid();
+ return mIsDetached.get();
+ }
+
+ @Override
+ ImageReader getOwner() {
+ throwISEIfImageIsInvalid();
+ return ImageReader.this;
+ }
+
+ @Override
+ long getNativeContext() {
+ throwISEIfImageIsInvalid();
+ return mNativeBuffer;
}
- private boolean isImageValid() {
- return mIsImageValid;
+ private void setDetached(boolean detached) {
+ throwISEIfImageIsInvalid();
+ mIsDetached.getAndSet(detached);
}
private void clearSurfacePlanes() {
@@ -664,9 +770,7 @@ public class ImageReader implements AutoCloseable {
@Override
public ByteBuffer getBuffer() {
- if (SurfaceImage.this.isImageValid() == false) {
- throw new IllegalStateException("Image is already released");
- }
+ SurfaceImage.this.throwISEIfImageIsInvalid();
if (mBuffer != null) {
return mBuffer;
} else {
@@ -680,20 +784,14 @@ public class ImageReader implements AutoCloseable {
@Override
public int getPixelStride() {
- if (SurfaceImage.this.isImageValid()) {
- return mPixelStride;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ SurfaceImage.this.throwISEIfImageIsInvalid();
+ return mPixelStride;
}
@Override
public int getRowStride() {
- if (SurfaceImage.this.isImageValid()) {
- return mRowStride;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ SurfaceImage.this.throwISEIfImageIsInvalid();
+ return mRowStride;
}
private void clearBuffer() {
@@ -720,7 +818,7 @@ public class ImageReader implements AutoCloseable {
* This field is used to keep track of native object and used by native code only.
* Don't modify.
*/
- private long mLockedBuffer;
+ private long mNativeBuffer;
/**
* This field is set by native code during nativeImageSetup().
@@ -728,14 +826,17 @@ public class ImageReader implements AutoCloseable {
private long mTimestamp;
private SurfacePlane[] mPlanes;
- private boolean mIsImageValid;
private int mHeight = -1;
private int mWidth = -1;
+ private int mFormat = ImageFormat.UNKNOWN;
+ // If this image is detached from the ImageReader.
+ private AtomicBoolean mIsDetached = new AtomicBoolean(false);
private synchronized native ByteBuffer nativeImageGetBuffer(int idx, int readerFormat);
private synchronized native SurfacePlane nativeCreatePlane(int idx, int readerFormat);
- private synchronized native int nativeGetWidth();
- private synchronized native int nativeGetHeight();
+ private synchronized native int nativeGetWidth(int format);
+ private synchronized native int nativeGetHeight(int format);
+ private synchronized native int nativeGetFormat(int readerFormat);
}
private synchronized native void nativeInit(Object weakSelf, int w, int h,
@@ -743,6 +844,7 @@ public class ImageReader implements AutoCloseable {
private synchronized native void nativeClose();
private synchronized native void nativeReleaseImage(Image i);
private synchronized native Surface nativeGetSurface();
+ private synchronized native int nativeDetachImage(Image i);
/**
* @return A return code {@code ACQUIRE_*}
diff --git a/media/java/android/media/ImageUtils.java b/media/java/android/media/ImageUtils.java
new file mode 100644
index 0000000..2763d1d
--- /dev/null
+++ b/media/java/android/media/ImageUtils.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.media.Image.Plane;
+import android.util.Size;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Package private utility class for hosting commonly used Image related methods.
+ */
+class ImageUtils {
+
+ /**
+ * Only a subset of the formats defined in
+ * {@link android.graphics.ImageFormat ImageFormat} and
+ * {@link android.graphics.PixelFormat PixelFormat} are supported by
+ * ImageReader. When reading RGB data from a surface, the formats defined in
+ * {@link android.graphics.PixelFormat PixelFormat} can be used; when
+ * reading YUV, JPEG or raw sensor data (for example, from the camera or video
+ * decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
+ * are used.
+ */
+ public static int getNumPlanesForFormat(int format) {
+ switch (format) {
+ case ImageFormat.YV12:
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.NV21:
+ return 3;
+ case ImageFormat.NV16:
+ return 2;
+ case PixelFormat.RGB_565:
+ case PixelFormat.RGBA_8888:
+ case PixelFormat.RGBX_8888:
+ case PixelFormat.RGB_888:
+ case ImageFormat.JPEG:
+ case ImageFormat.YUY2:
+ case ImageFormat.Y8:
+ case ImageFormat.Y16:
+ case ImageFormat.RAW_SENSOR:
+ case ImageFormat.RAW10:
+ return 1;
+ case ImageFormat.PRIVATE:
+ return 0;
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Invalid format specified %d", format));
+ }
+ }
+
+ /**
+ * <p>
+ * Copy source image data to destination Image.
+ * </p>
+ * <p>
+ * Only support the copy between two non-{@link ImageFormat#PRIVATE PRIVATE} format
+ * images with same properties (format, size, etc.). The data from the
+ * source image will be copied to the byteBuffers from the destination Image
+ * starting from position zero, and the destination image will be rewound to
+ * zero after copy is done.
+ * </p>
+ *
+ * @param src The source image to be copied from.
+ * @param dst The destination image to be copied to.
+ * @throws IllegalArgumentException If the source and destination images
+ * have different format, or one of the images is not copyable.
+ */
+ public static void imageCopy(Image src, Image dst) {
+ if (src == null || dst == null) {
+ throw new IllegalArgumentException("Images should be non-null");
+ }
+ if (src.getFormat() != dst.getFormat()) {
+ throw new IllegalArgumentException("Src and dst images should have the same format");
+ }
+ if (src.getFormat() == ImageFormat.PRIVATE ||
+ dst.getFormat() == ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException("PRIVATE format images are not copyable");
+ }
+ if (!(dst.getOwner() instanceof ImageWriter)) {
+ throw new IllegalArgumentException("Destination image is not from ImageWriter. Only"
+ + " the images from ImageWriter are writable");
+ }
+ Size srcSize = new Size(src.getWidth(), src.getHeight());
+ Size dstSize = new Size(dst.getWidth(), dst.getHeight());
+ if (!srcSize.equals(dstSize)) {
+ throw new IllegalArgumentException("source image size " + srcSize + " is different"
+ + " with " + "destination image size " + dstSize);
+ }
+
+ Plane[] srcPlanes = src.getPlanes();
+ Plane[] dstPlanes = dst.getPlanes();
+ ByteBuffer srcBuffer = null;
+ ByteBuffer dstBuffer = null;
+ for (int i = 0; i < srcPlanes.length; i++) {
+ srcBuffer = srcPlanes[i].getBuffer();
+ int srcPos = srcBuffer.position();
+ srcBuffer.rewind();
+ dstBuffer = dstPlanes[i].getBuffer();
+ dstBuffer.rewind();
+ dstBuffer.put(srcBuffer);
+ srcBuffer.position(srcPos);
+ dstBuffer.rewind();
+ }
+ }
+
+ /**
+ * Return the estimated native allocation size in bytes based on width, height, format,
+ * and number of images.
+ *
+ * <p>This is a very rough estimation and should only be used for native allocation
+ * registration in VM so it can be accounted for during GC.</p>
+ *
+ * @param width The width of the images.
+ * @param height The height of the images.
+ * @param format The format of the images.
+ * @param numImages The number of the images.
+ */
+ public static int getEstimatedNativeAllocBytes(int width, int height, int format,
+ int numImages) {
+ double estimatedBytePerPixel;
+ switch (format) {
+ // 10x compression from RGB_888
+ case ImageFormat.JPEG:
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ estimatedBytePerPixel = 0.3;
+ break;
+ case ImageFormat.Y8:
+ estimatedBytePerPixel = 1.0;
+ break;
+ case ImageFormat.RAW10:
+ estimatedBytePerPixel = 1.25;
+ break;
+ case ImageFormat.YV12:
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.NV21:
+ case ImageFormat.PRIVATE: // A really rough estimate because the real size is unknown.
+ estimatedBytePerPixel = 1.5;
+ break;
+ case ImageFormat.NV16:
+ case PixelFormat.RGB_565:
+ case ImageFormat.YUY2:
+ case ImageFormat.Y16:
+ case ImageFormat.RAW_SENSOR:
+ case ImageFormat.DEPTH16:
+ estimatedBytePerPixel = 2.0;
+ break;
+ case PixelFormat.RGB_888:
+ estimatedBytePerPixel = 3.0;
+ break;
+ case PixelFormat.RGBA_8888:
+ case PixelFormat.RGBX_8888:
+ estimatedBytePerPixel = 4.0;
+ break;
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Invalid format specified %d", format));
+ }
+
+ return (int)(width * height * estimatedBytePerPixel * numImages);
+ }
+}
diff --git a/media/java/android/media/ImageWriter.java b/media/java/android/media/ImageWriter.java
new file mode 100644
index 0000000..2ef2519
--- /dev/null
+++ b/media/java/android/media/ImageWriter.java
@@ -0,0 +1,800 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.NioUtils;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * <p>
+ * The ImageWriter class allows an application to produce Image data into a
+ * {@link android.view.Surface}, and have it be consumed by another component
+ * like {@link android.hardware.camera2.CameraDevice CameraDevice}.
+ * </p>
+ * <p>
+ * Several Android API classes can provide input {@link android.view.Surface
+ * Surface} objects for ImageWriter to produce data into, including
+ * {@link MediaCodec MediaCodec} (encoder),
+ * {@link android.hardware.camera2.CameraDevice CameraDevice} (reprocessing
+ * input), {@link ImageReader}, etc.
+ * </p>
+ * <p>
+ * The input Image data is encapsulated in {@link Image} objects. To produce
+ * Image data into a destination {@link android.view.Surface Surface}, the
+ * application can get an input Image via {@link #dequeueInputImage} then write
+ * Image data into it. Multiple such {@link Image} objects can be dequeued at
+ * the same time and queued back in any order, up to the number specified by the
+ * {@code maxImages} constructor parameter.
+ * </p>
+ * <p>
+ * If the application already has an Image from {@link ImageReader}, the
+ * application can directly queue this Image into ImageWriter (via
+ * {@link #queueInputImage}), potentially with zero buffer copies. For the
+ * {@link ImageFormat#PRIVATE PRIVATE} format Images produced by
+ * {@link ImageReader}, this is the only way to send Image data to ImageWriter,
+ * as the Image data aren't accessible by the application.
+ * </p>
+ * <p>Once new input Images are queued into an ImageWriter, it's up to the
+ * downstream components (e.g. {@link ImageReader} or
+ * {@link android.hardware.camera2.CameraDevice}) to consume the Images. If the
+ * downstream components cannot consume the Images at least as fast as the
+ * ImageWriter production rate, the {@link #dequeueInputImage} call will
+ * eventually block and the application will have to drop input frames. </p>
+ */
+public class ImageWriter implements AutoCloseable {
+ private final Object mListenerLock = new Object();
+ private OnImageReleasedListener mListener;
+ private ListenerHandler mListenerHandler;
+ private long mNativeContext;
+
+ // Field below is used by native code, do not access or modify.
+ private int mWriterFormat;
+
+ private final int mMaxImages;
+ // Keep track of the currently dequeued Image.
+ private List<Image> mDequeuedImages = new ArrayList<Image>();
+
+ /**
+ * <p>
+ * Create a new ImageWriter.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+ * {@link Image} objects that can be be dequeued from the
+ * {@code ImageWriter} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary.
+ * </p>
+ * <p>
+ * The input Image size and format depend on the Surface that is provided by
+ * the downstream consumer end-point.
+ * </p>
+ *
+ * @param surface The destination Surface this writer produces Image data
+ * into.
+ * @param maxImages The maximum number of Images the user will want to
+ * access simultaneously for producing Image data. This should be
+ * as small as possible to limit memory use. Once maxImages
+ * Images are dequeued by the user, one of them has to be queued
+ * back before a new Image can be dequeued for access via
+ * {@link #dequeueInputImage()}.
+ * @throws IllegalArgumentException if {@code surface} is null or
+ * {@code maxImages} is less than 1.
+ * @return a new ImageWriter instance.
+ */
+ public static ImageWriter newInstance(Surface surface, int maxImages) {
+ return new ImageWriter(surface, maxImages);
+ }
+
+ /**
+ * Create a writer producing into {@code surface}, allowing at most
+ * {@code maxImages} Images to be dequeued concurrently. Use
+ * {@link #newInstance} instead of calling this directly.
+ *
+ * @param surface The destination Surface this writer produces Image data into.
+ * @param maxImages Maximum number of concurrently dequeued Images; must be >= 1.
+ * @throws IllegalArgumentException if {@code surface} is null or
+ * {@code maxImages} is less than 1.
+ * @hide
+ */
+ protected ImageWriter(Surface surface, int maxImages) {
+ if (surface == null || maxImages < 1) {
+ throw new IllegalArgumentException("Illegal input argument: surface " + surface
+ + ", maxImages: " + maxImages);
+ }
+
+ mMaxImages = maxImages;
+ // Note that the underlying BufferQueue is working in synchronous mode
+ // to avoid dropping any buffers.
+ mNativeContext = nativeInit(new WeakReference<ImageWriter>(this), surface, maxImages);
+ }
+
+ /**
+ * <p>
+ * Maximum number of Images that can be dequeued from the ImageWriter
+ * simultaneously (for example, with {@link #dequeueInputImage()}).
+ * </p>
+ * <p>
+ * An Image is considered dequeued after it's returned by
+ * {@link #dequeueInputImage()} from ImageWriter, and until the Image is
+ * sent back to ImageWriter via {@link #queueInputImage}, or
+ * {@link Image#close()}.
+ * </p>
+ * <p>
+ * Attempting to dequeue more than {@code maxImages} concurrently will
+ * result in the {@link #dequeueInputImage()} function throwing an
+ * {@link IllegalStateException}.
+ * </p>
+ *
+ * @return Maximum number of Images that can be dequeued from this
+ * ImageWriter.
+ * @see #dequeueInputImage
+ * @see #queueInputImage
+ * @see Image#close
+ */
+ public int getMaxImages() {
+ return mMaxImages;
+ }
+
+ /**
+ * <p>
+ * Dequeue the next available input Image for the application to produce
+ * data into.
+ * </p>
+ * <p>
+ * This method requests a new input Image from ImageWriter. The application
+ * owns this Image after this call. Once the application fills the Image
+ * data, it is expected to return this Image back to ImageWriter for
+ * downstream consumer components (e.g.
+ * {@link android.hardware.camera2.CameraDevice}) to consume. The Image can
+ * be returned to ImageWriter via {@link #queueInputImage} or
+ * {@link Image#close()}.
+ * </p>
+ * <p>
+ * This call will block if all available input images have been queued by
+ * the application and the downstream consumer has not yet consumed any.
+ * When an Image is consumed by the downstream consumer and released, an
+ * {@link OnImageReleasedListener#onImageReleased} callback will be fired,
+ * which indicates that there is one input Image available. For non-
+ * {@link ImageFormat#PRIVATE PRIVATE} formats (
+ * {@link ImageWriter#getFormat()} != {@link ImageFormat#PRIVATE}), it is
+ * recommended to dequeue the next Image only after this callback is fired,
+ * in the steady state.
+ * </p>
+ * <p>
+ * If the format of ImageWriter is {@link ImageFormat#PRIVATE PRIVATE} (
+ * {@link ImageWriter#getFormat()} == {@link ImageFormat#PRIVATE}), the
+ * image buffer is inaccessible to the application, and calling this method
+ * will result in an {@link IllegalStateException}. Instead, the application
+ * should acquire images from some other component (e.g. an
+ * {@link ImageReader}), and queue them directly to this ImageWriter via the
+ * {@link ImageWriter#queueInputImage queueInputImage()} method.
+ * </p>
+ *
+ * @return The next available input Image from this ImageWriter.
+ * @throws IllegalStateException if {@code maxImages} Images are currently
+ * dequeued, or the ImageWriter format is
+ * {@link ImageFormat#PRIVATE PRIVATE}.
+ * @see #queueInputImage
+ * @see Image#close
+ */
+ public Image dequeueInputImage() {
+ // PRIVATE-format buffers cannot be accessed by the app, so handing one
+ // out for the app to fill makes no sense; see the javadoc above.
+ if (mWriterFormat == ImageFormat.PRIVATE) {
+ throw new IllegalStateException(
+ "PRIVATE format ImageWriter doesn't support this operation since the images are"
+ + " inaccessible to the application!");
+ }
+
+ if (mDequeuedImages.size() >= mMaxImages) {
+ throw new IllegalStateException("Already dequeued max number of Images " + mMaxImages);
+ }
+ // Create an empty wrapper and let native code fill in its buffer details.
+ WriterSurfaceImage newImage = new WriterSurfaceImage(this);
+ nativeDequeueInputImage(mNativeContext, newImage);
+ // Track the image so close()/abortImage() can reclaim it; mark it valid
+ // only after the native dequeue has succeeded.
+ mDequeuedImages.add(newImage);
+ newImage.mIsImageValid = true;
+ return newImage;
+ }
+
+ /**
+ * <p>
+ * Queue an input {@link Image} back to ImageWriter for the downstream
+ * consumer to access.
+ * </p>
+ * <p>
+ * The input {@link Image} could be from ImageReader (acquired via
+ * {@link ImageReader#acquireNextImage} or
+ * {@link ImageReader#acquireLatestImage}), or from this ImageWriter
+ * (acquired via {@link #dequeueInputImage}). In the former case, the Image
+ * data will be moved to this ImageWriter. Note that the Image properties
+ * (size, format, strides, etc.) must be the same as the properties of the
+ * images dequeued from this ImageWriter, or this method will throw an
+ * {@link IllegalArgumentException}. In the latter case, the application has
+ * filled the input image with data. This method then passes the filled
+ * buffer to the downstream consumer. In both cases, it's up to the caller
+ * to ensure that the Image timestamp (in nanoseconds) is correctly set, as
+ * the downstream component may want to use it to indicate the Image data
+ * capture time.
+ * </p>
+ * <p>
+ * After this method is called and the downstream consumer consumes and
+ * releases the Image, an {@link OnImageReleasedListener#onImageReleased}
+ * callback will fire. The application can use this callback to avoid
+ * sending Images faster than the downstream consumer processing rate in
+ * steady state.
+ * </p>
+ * <p>
+ * Passing in an Image from some other component (e.g. an
+ * {@link ImageReader}) requires a free input Image from this ImageWriter as
+ * the destination. In this case, this call will block, as
+ * {@link #dequeueInputImage} does, if there are no free Images available.
+ * To avoid blocking, the application should ensure that there is at least
+ * one free Image available in this ImageWriter before calling this method.
+ * </p>
+ * <p>
+ * After this call, the input Image is no longer valid for further access,
+ * as if the Image is {@link Image#close closed}. Attempting to access the
+ * {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Image.Plane#getBuffer Plane#getBuffer} call will result in an
+ * {@link IllegalStateException}.
+ * </p>
+ *
+ * @param image The Image to be queued back to ImageWriter for future
+ * consumption.
+ * @see #dequeueInputImage()
+ */
+ public void queueInputImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("image shouldn't be null");
+ }
+ boolean ownedByMe = isImageOwnedByMe(image);
+ if (ownedByMe && !(((WriterSurfaceImage) image).mIsImageValid)) {
+ throw new IllegalStateException("Image from ImageWriter is invalid");
+ }
+
+ // For images from other components, need to detach first, then attach.
+ if (!ownedByMe) {
+ if (!(image.getOwner() instanceof ImageReader)) {
+ throw new IllegalArgumentException("Only images from ImageReader can be queued to"
+ + " ImageWriter, other image source is not supported yet!");
+ }
+
+ ImageReader prevOwner = (ImageReader) image.getOwner();
+ // Only do the image attach for PRIVATE format images for now. Do the image
+ // copy for other formats. TODO: use attach for other formats to
+ // improve the performance, and fall back to copy when attach/detach
+ // fails. Right now, detach is guaranteed to fail as the buffer is
+ // locked when ImageReader#acquireNextImage is called. See bug 19962027.
+ if (image.getFormat() == ImageFormat.PRIVATE) {
+ prevOwner.detachImage(image);
+ attachAndQueueInputImage(image);
+ // This clears the native reference held by the original owner.
+ // When this Image is detached later by this ImageWriter, the
+ // native memory won't be leaked.
+ image.close();
+ return;
+ } else {
+ // Non-PRIVATE formats: copy the source data into one of our own
+ // dequeued images, preserving timestamp and crop, then treat the
+ // copy as an owned image for the rest of this method.
+ Image inputImage = dequeueInputImage();
+ inputImage.setTimestamp(image.getTimestamp());
+ inputImage.setCropRect(image.getCropRect());
+ ImageUtils.imageCopy(image, inputImage);
+ image.close();
+ image = inputImage;
+ ownedByMe = true;
+ }
+ }
+
+ // Hand the filled buffer to the destination Surface along with its
+ // timestamp and crop region.
+ Rect crop = image.getCropRect();
+ nativeQueueInputImage(mNativeContext, image, image.getTimestamp(), crop.left, crop.top,
+ crop.right, crop.bottom);
+
+ /**
+ * Only remove and cleanup the Images that are owned by this
+ * ImageWriter. Images detached from other owners are only temporarily
+ * owned by this ImageWriter and will be detached immediately after they
+ * are released by downstream consumers, so there is no need to keep
+ * track of them in mDequeuedImages.
+ */
+ if (ownedByMe) {
+ mDequeuedImages.remove(image);
+ // Do not call close here, as close is essentially cancel image.
+ WriterSurfaceImage wi = (WriterSurfaceImage) image;
+ wi.clearSurfacePlanes();
+ wi.mIsImageValid = false;
+ }
+ }
+
+ /**
+ * Get the ImageWriter format.
+ * <p>
+ * This format may be different than the Image format returned by
+ * {@link Image#getFormat()}. However, if the ImageWriter format is
+ * {@link ImageFormat#PRIVATE PRIVATE}, calling {@link #dequeueInputImage()}
+ * will result in an {@link IllegalStateException}.
+ * </p>
+ *
+ * @return The ImageWriter format.
+ */
+ public int getFormat() {
+ // mWriterFormat is written by native code (see the field declaration).
+ return mWriterFormat;
+ }
+
+ /**
+ * ImageWriter callback interface, used to to asynchronously notify the
+ * application of various ImageWriter events.
+ */
+ public interface OnImageReleasedListener {
+ /**
+ * <p>
+ * Callback that is called when an input Image is released back to
+ * ImageWriter after the data consumption.
+ * </p>
+ * <p>
+ * The client can use this callback to be notified that an input Image
+ * has been consumed and released by the downstream consumer. More
+ * specifically, this callback will be fired in the following cases:
+ * <li>The application dequeues an input Image via the
+ * {@link ImageWriter#dequeueInputImage dequeueInputImage()} method,
+ * uses it, and then queues it back to this ImageWriter via the
+ * {@link ImageWriter#queueInputImage queueInputImage()} method. After
+ * the downstream consumer uses and releases this image to this
+ * ImageWriter, this callback will be fired. This image will be
+ * available to be dequeued after this callback.</li>
+ * <li>The application obtains an Image from some other component (e.g.
+ * an {@link ImageReader}), uses it, and then queues it to this
+ * ImageWriter via {@link ImageWriter#queueInputImage queueInputImage()}.
+ * After the downstream consumer uses and releases this image to this
+ * ImageWriter, this callback will be fired.</li>
+ * </p>
+ *
+ * @param writer the ImageWriter the callback is associated with.
+ * @see ImageWriter
+ * @see Image
+ */
+ void onImageReleased(ImageWriter writer);
+ }
+
+ /**
+ * Register a listener to be invoked when an input Image is returned to the
+ * ImageWriter.
+ *
+ * @param listener The listener that will be run.
+ * @param handler The handler on which the listener should be invoked, or
+ * null if the listener should be invoked on the calling thread's
+ * looper.
+ * @throws IllegalArgumentException If no handler specified and the calling
+ * thread has no looper.
+ */
+ public void setOnImageReleasedListener(OnImageReleasedListener listener, Handler handler) {
+ synchronized (mListenerLock) {
+ if (listener != null) {
+ // Default to the calling thread's looper when no handler is given.
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper == null) {
+ throw new IllegalArgumentException(
+ "handler is null but the current thread is not a looper");
+ }
+ // Reuse the existing handler if it already dispatches on the
+ // requested looper; otherwise create a fresh one.
+ if (mListenerHandler == null || mListenerHandler.getLooper() != looper) {
+ mListenerHandler = new ListenerHandler(looper);
+ }
+ mListener = listener;
+ } else {
+ // A null listener clears both the callback and its handler.
+ mListener = null;
+ mListenerHandler = null;
+ }
+ }
+ }
+
+ /**
+ * Free up all the resources associated with this ImageWriter.
+ * <p>
+ * After calling this method, this ImageWriter cannot be used. Calling any
+ * methods on this ImageWriter and Images previously provided by
+ * {@link #dequeueInputImage()} will result in an
+ * {@link IllegalStateException}, and attempting to write into
+ * {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Image.Plane#getBuffer Plane#getBuffer} call will have undefined
+ * behavior.
+ * </p>
+ */
+ @Override
+ public void close() {
+ setOnImageReleasedListener(null, null);
+ // Close over a snapshot: WriterSurfaceImage.close() calls abortImage(),
+ // which removes the image from mDequeuedImages. Iterating the live list
+ // here would throw ConcurrentModificationException.
+ for (Image image : new ArrayList<Image>(mDequeuedImages)) {
+ image.close();
+ }
+ mDequeuedImages.clear();
+ // Guard so that a second close() (e.g. invoked again from finalize())
+ // is a no-op and never passes a stale context to native code.
+ if (mNativeContext != 0) {
+ nativeClose(mNativeContext);
+ mNativeContext = 0;
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ // Safety net: release native resources if the application never
+ // called close() explicitly.
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /**
+ * <p>
+ * Attach and queue input Image to this ImageWriter.
+ * </p>
+ * <p>
+ * When the format of an Image is {@link ImageFormat#PRIVATE PRIVATE}, or
+ * the source Image is so large that copying its data is too expensive, this
+ * method can be used to migrate the source Image into ImageWriter without a
+ * data copy, and then queue it to this ImageWriter. The source Image must
+ * be detached from its previous owner already, or this call will throw an
+ * {@link IllegalStateException}.
+ * </p>
+ * <p>
+ * After this call, the ImageWriter takes ownership of this Image. This
+ * ownership will automatically be removed from this writer after the
+ * consumer releases this Image, that is, after
+ * {@link OnImageReleasedListener#onImageReleased}. The caller is responsible for
+ * closing this Image through {@link Image#close()} to free up the resources
+ * held by this Image.
+ * </p>
+ *
+ * @param image The source Image to be attached and queued into this
+ * ImageWriter for downstream consumer to use.
+ * @throws IllegalStateException if the Image is not detached from its
+ * previous owner, or the Image is already attached to this
+ * ImageWriter, or the source Image is invalid.
+ */
+ private void attachAndQueueInputImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("image shouldn't be null");
+ }
+ if (isImageOwnedByMe(image)) {
+ throw new IllegalArgumentException(
+ "Can not attach an image that is owned by this ImageWriter already");
+ }
+ /**
+ * Throw ISE if the image is not attachable, which means that it is
+ * either owned by other entity now, or completely non-attachable (some
+ * stand-alone images are not backed by native gralloc buffer, thus not
+ * attachable).
+ */
+ if (!image.isAttachable()) {
+ throw new IllegalStateException("Image was not detached from last owner, or image"
+ + " is not detachable");
+ }
+
+ // TODO: what if attach failed, throw RTE or detach a slot then attach?
+ // need do some cleanup to make sure no orphaned
+ // buffer caused leak.
+ Rect crop = image.getCropRect();
+ nativeAttachAndQueueImage(mNativeContext, image.getNativeContext(), image.getFormat(),
+ image.getTimestamp(), crop.left, crop.top, crop.right, crop.bottom);
+ }
+
+ /**
+ * This custom handler runs asynchronously so callbacks don't get queued
+ * behind UI messages.
+ */
+ private final class ListenerHandler extends Handler {
+ public ListenerHandler(Looper looper) {
+ super(looper, null, true /* async */);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ // Snapshot the listener under the lock, then invoke it after the
+ // lock has been released so the callback does not run while holding
+ // mListenerLock.
+ OnImageReleasedListener listener;
+ synchronized (mListenerLock) {
+ listener = mListener;
+ }
+ if (listener != null) {
+ listener.onImageReleased(ImageWriter.this);
+ }
+ }
+ }
+
+ /**
+ * Called from Native code when an Event happens. This may be called from an
+ * arbitrary Binder thread, so access to the ImageWriter must be
+ * synchronized appropriately.
+ */
+ private static void postEventFromNative(Object selfRef) {
+ @SuppressWarnings("unchecked")
+ WeakReference<ImageWriter> weakSelf = (WeakReference<ImageWriter>) selfRef;
+ // Native code holds only a weak reference; the writer may already have
+ // been garbage collected, in which case there is nothing to notify.
+ final ImageWriter iw = weakSelf.get();
+ if (iw == null) {
+ return;
+ }
+
+ // Read the handler under the listener lock; if no listener/handler is
+ // registered, the event is dropped.
+ final Handler handler;
+ synchronized (iw.mListenerLock) {
+ handler = iw.mListenerHandler;
+ }
+ if (handler != null) {
+ handler.sendEmptyMessage(0);
+ }
+ }
+
+ /**
+ * <p>
+ * Abort the Images that were dequeued from this ImageWriter, and return
+ * them to this writer for reuse.
+ * </p>
+ * <p>
+ * This method is used for the cases where the application dequeued the
+ * Image, may have filled the data, but does not want the downstream
+ * component to consume it. The Image will be returned to this ImageWriter
+ * for reuse after this call, and the ImageWriter will immediately have an
+ * Image available to be dequeued. This aborted Image will be invisible to
+ * the downstream consumer, as if nothing happened.
+ * </p>
+ *
+ * @param image The Image to be aborted.
+ * @see #dequeueInputImage()
+ * @see Image#close()
+ */
+ private void abortImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("image shouldn't be null");
+ }
+
+ // Only images currently tracked as dequeued can be aborted; this also
+ // guarantees the cast to WriterSurfaceImage below is safe.
+ if (!mDequeuedImages.contains(image)) {
+ throw new IllegalStateException("It is illegal to abort some image that is not"
+ + " dequeued yet");
+ }
+
+ WriterSurfaceImage wi = (WriterSurfaceImage) image;
+
+ if (!wi.mIsImageValid) {
+ throw new IllegalStateException("Image is invalid");
+ }
+
+ /**
+ * We only need abort Images that are owned and dequeued by ImageWriter.
+ * For attached Images, no need to abort, as there are only two cases:
+ * attached + queued successfully, and attach failed. Neither of the
+ * cases need abort.
+ */
+ // Return the buffer to the writer's queue without sending it downstream,
+ // then stop tracking it and invalidate the wrapper.
+ cancelImage(mNativeContext, image);
+ mDequeuedImages.remove(image);
+ wi.clearSurfacePlanes();
+ wi.mIsImageValid = false;
+ }
+
+ // An image is "ours" only if it is a WriterSurfaceImage whose recorded
+ // owner is this ImageWriter instance. Note getOwner() validates the image
+ // and may throw IllegalStateException for an invalidated image.
+ private boolean isImageOwnedByMe(Image image) {
+ return (image instanceof WriterSurfaceImage)
+ && ((WriterSurfaceImage) image).getOwner() == this;
+ }
+
+ private static class WriterSurfaceImage extends android.media.Image {
+ private ImageWriter mOwner;
+ // This field is used by native code, do not access or modify.
+ private long mNativeBuffer;
+ private int mNativeFenceFd = -1;
+ private SurfacePlane[] mPlanes;
+ private int mHeight = -1;
+ private int mWidth = -1;
+ private int mFormat = -1;
+ // When this default timestamp is used, timestamp for the input Image
+ // will be generated automatically when queueInputBuffer is called.
+ private final long DEFAULT_TIMESTAMP = Long.MIN_VALUE;
+ private long mTimestamp = DEFAULT_TIMESTAMP;
+
+ public WriterSurfaceImage(ImageWriter writer) {
+ mOwner = writer;
+ }
+
+ @Override
+ public int getFormat() {
+ throwISEIfImageIsInvalid();
+
+ if (mFormat == -1) {
+ mFormat = nativeGetFormat();
+ }
+ return mFormat;
+ }
+
+ @Override
+ public int getWidth() {
+ throwISEIfImageIsInvalid();
+
+ if (mWidth == -1) {
+ mWidth = nativeGetWidth();
+ }
+
+ return mWidth;
+ }
+
+ @Override
+ public int getHeight() {
+ throwISEIfImageIsInvalid();
+
+ if (mHeight == -1) {
+ mHeight = nativeGetHeight();
+ }
+
+ return mHeight;
+ }
+
+ @Override
+ public long getTimestamp() {
+ throwISEIfImageIsInvalid();
+
+ return mTimestamp;
+ }
+
+ @Override
+ public void setTimestamp(long timestamp) {
+ throwISEIfImageIsInvalid();
+
+ mTimestamp = timestamp;
+ }
+
+ @Override
+ public Plane[] getPlanes() {
+ throwISEIfImageIsInvalid();
+
+ if (mPlanes == null) {
+ // Lazily create the planes on first access; the plane count depends
+ // on the Image format, and the buffers are supplied by native code.
+ int numPlanes = ImageUtils.getNumPlanesForFormat(getFormat());
+ mPlanes = nativeCreatePlanes(numPlanes, getOwner().getFormat());
+ }
+
+ // Return a shallow copy so callers cannot replace entries in our array.
+ return mPlanes.clone();
+ }
+
+ @Override
+ boolean isAttachable() {
+ throwISEIfImageIsInvalid();
+ // Don't allow Image to be detached from ImageWriter for now, as no
+ // detach API is exposed.
+ return false;
+ }
+
+ @Override
+ ImageWriter getOwner() {
+ throwISEIfImageIsInvalid();
+
+ return mOwner;
+ }
+
+ @Override
+ long getNativeContext() {
+ throwISEIfImageIsInvalid();
+
+ return mNativeBuffer;
+ }
+
+ @Override
+ public void close() {
+ if (mIsImageValid) {
+ // Closing a dequeued-but-not-queued image means cancelling it:
+ // hand the buffer back to the writer via abortImage().
+ getOwner().abortImage(this);
+ }
+ }
+
+ @Override
+ protected final void finalize() throws Throwable {
+ try {
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ // Release the plane buffers and drop their references so stale ByteBuffers
+ // cannot be used after the image has been queued or aborted.
+ private void clearSurfacePlanes() {
+ if (mIsImageValid) {
+ for (int i = 0; i < mPlanes.length; i++) {
+ if (mPlanes[i] != null) {
+ mPlanes[i].clearBuffer();
+ mPlanes[i] = null;
+ }
+ }
+ }
+ }
+
+ private class SurfacePlane extends android.media.Image.Plane {
+ private ByteBuffer mBuffer;
+ final private int mPixelStride;
+ final private int mRowStride;
+
+ // SurfacePlane instance is created by native code when a new
+ // SurfaceImage is created
+ private SurfacePlane(int rowStride, int pixelStride, ByteBuffer buffer) {
+ mRowStride = rowStride;
+ mPixelStride = pixelStride;
+ mBuffer = buffer;
+ /**
+ * Set the byteBuffer order according to host endianness (native
+ * order), otherwise, the byteBuffer order defaults to
+ * ByteOrder.BIG_ENDIAN.
+ */
+ mBuffer.order(ByteOrder.nativeOrder());
+ }
+
+ @Override
+ public int getRowStride() {
+ throwISEIfImageIsInvalid();
+ return mRowStride;
+ }
+
+ @Override
+ public int getPixelStride() {
+ throwISEIfImageIsInvalid();
+ return mPixelStride;
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ throwISEIfImageIsInvalid();
+ return mBuffer;
+ }
+
+ private void clearBuffer() {
+ // Need null check first, as the getBuffer() may not be called
+ // before an Image is closed.
+ if (mBuffer == null) {
+ return;
+ }
+
+ // Free a direct buffer's native memory eagerly rather than waiting
+ // for the garbage collector to reclaim it.
+ if (mBuffer.isDirect()) {
+ NioUtils.freeDirectBuffer(mBuffer);
+ }
+ mBuffer = null;
+ }
+
+ }
+
+ // this will create the SurfacePlane object and fill the information
+ private synchronized native SurfacePlane[] nativeCreatePlanes(int numPlanes, int writerFmt);
+
+ private synchronized native int nativeGetWidth();
+
+ private synchronized native int nativeGetHeight();
+
+ private synchronized native int nativeGetFormat();
+ }
+
+ // Native implemented ImageWriter methods.
+ private synchronized native long nativeInit(Object weakSelf, Surface surface, int maxImgs);
+
+ private synchronized native void nativeClose(long nativeCtx);
+
+ private synchronized native void nativeDequeueInputImage(long nativeCtx, Image wi);
+
+ private synchronized native void nativeQueueInputImage(long nativeCtx, Image image,
+ long timestampNs, int left, int top, int right, int bottom);
+
+ private synchronized native int nativeAttachAndQueueImage(long nativeCtx,
+ long imageNativeBuffer, int imageFormat, long timestampNs, int left,
+ int top, int right, int bottom);
+
+ private synchronized native void cancelImage(long nativeCtx, Image image);
+
+ /**
+ * We use a class initializer to allow the native code to cache some field
+ * offsets.
+ */
+ private static native void nativeClassInit();
+
+ static {
+ System.loadLibrary("media_jni");
+ nativeClassInit();
+ }
+}
diff --git a/media/java/android/media/MediaCodec.java b/media/java/android/media/MediaCodec.java
index 8985b52..eec4960 100644
--- a/media/java/android/media/MediaCodec.java
+++ b/media/java/android/media/MediaCodec.java
@@ -16,11 +16,14 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.media.Image;
-import android.media.Image.Plane;
import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaCrypto;
import android.media.MediaFormat;
@@ -31,6 +34,8 @@ import android.os.Message;
import android.view.Surface;
import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ReadOnlyBufferException;
import java.util.Arrays;
@@ -38,204 +43,1307 @@ import java.util.HashMap;
import java.util.Map;
/**
- * MediaCodec class can be used to access low-level media codec, i.e.
- * encoder/decoder components.
- *
- * <p>MediaCodec is generally used like this:
- * <pre>
- * MediaCodec codec = MediaCodec.createDecoderByType(type);
- * codec.configure(format, ...);
- * codec.start();
- *
- * // if API level <= 20, get input and output buffer arrays here
- * ByteBuffer[] inputBuffers = codec.getInputBuffers();
- * ByteBuffer[] outputBuffers = codec.getOutputBuffers();
- * for (;;) {
- * int inputBufferIndex = codec.dequeueInputBuffer(timeoutUs);
- * if (inputBufferIndex &gt;= 0) {
- * // if API level >= 21, get input buffer here
- * ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferIndex);
- * // fill inputBuffers[inputBufferIndex] with valid data
- * ...
- * codec.queueInputBuffer(inputBufferIndex, ...);
- * }
- *
- * int outputBufferIndex = codec.dequeueOutputBuffer(timeoutUs);
- * if (outputBufferIndex &gt;= 0) {
- * // if API level >= 21, get output buffer here
- * ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferIndex);
- * // outputBuffer is ready to be processed or rendered.
- * ...
- * codec.releaseOutputBuffer(outputBufferIndex, ...);
- * } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
- * // no needed to handle if API level >= 21 and using getOutputBuffer(int)
- * outputBuffers = codec.getOutputBuffers();
- * } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
- * // Subsequent data will conform to new format.
- * // can ignore if API level >= 21 and using getOutputFormat(outputBufferIndex)
- * MediaFormat format = codec.getOutputFormat();
- * ...
- * }
- * }
- * codec.stop();
- * codec.release();
- * codec = null;
- * </pre>
- *
- * Each codec maintains a number of input and output buffers that are
- * referred to by index in API calls.
- * <p>
- * For API levels 20 and below:
- * The contents of these buffers are represented by the ByteBuffer[] arrays
- * accessible through {@link #getInputBuffers} and {@link #getOutputBuffers}.
- * <p>
- * After a successful call to {@link #start} the client "owns" neither
- * input nor output buffers, subsequent calls to {@link #dequeueInputBuffer}
- * and {@link #dequeueOutputBuffer} then transfer ownership from the codec
- * to the client.<p>
- * The client is not required to resubmit/release buffers immediately
- * to the codec, the sample code above simply does this for simplicity's sake.
- * Nonetheless, it is possible that a codec may hold off on generating
- * output buffers until all outstanding buffers have been
- * released/resubmitted.
- * <p>
- * Once the client has an input buffer available it can fill it with data
- * and submit it it to the codec via a call to {@link #queueInputBuffer}.
- * Do not submit multiple input buffers with the same timestamp (unless
- * it is codec-specific data marked as such using the flag
- * {@link #BUFFER_FLAG_CODEC_CONFIG}).
- * <p>
- * The codec in turn will return an output buffer to the client in response
- * to {@link #dequeueOutputBuffer}. After the output buffer has been processed
- * a call to {@link #releaseOutputBuffer} will return it to the codec.
- * If a video surface has been provided in the call to {@link #configure},
- * {@link #releaseOutputBuffer} optionally allows rendering of the buffer
- * to the surface.<p>
- *
- * Input buffers (for decoders) and Output buffers (for encoders) contain
- * encoded data according to the format's type. For video types this data
- * is all the encoded data representing a single moment in time, for audio
- * data this is slightly relaxed in that a buffer may contain multiple
- * encoded frames of audio. In either case, buffers do not start and end on
- * arbitrary byte boundaries, this is not a stream of bytes, it's a stream
- * of access units.<p>
- *
- * Most formats also require the actual data to be prefixed by a number
- * of buffers containing setup data, or codec specific data, i.e. the
- * first few buffers submitted to the codec object after starting it must
- * be codec specific data marked as such using the flag {@link #BUFFER_FLAG_CODEC_CONFIG}
- * in a call to {@link #queueInputBuffer}.
- * <p>
- * Codec specific data included in the format passed to {@link #configure}
- * (in ByteBuffer entries with keys "csd-0", "csd-1", ...) is automatically
- * submitted to the codec, this data MUST NOT be submitted explicitly by the
- * client.
- * <p>
- * Once the client reaches the end of the input data it signals the end of
- * the input stream by specifying a flag of {@link #BUFFER_FLAG_END_OF_STREAM} in the call to
- * {@link #queueInputBuffer}. The codec will continue to return output buffers
- * until it eventually signals the end of the output stream by specifying
- * the same flag ({@link #BUFFER_FLAG_END_OF_STREAM}) on the BufferInfo returned in
- * {@link #dequeueOutputBuffer}. Do not submit additional input buffers after
- * signaling the end of the input stream, unless the codec has been flushed,
- * or stopped and restarted.
- * <p>
- * <h3>Seeking &amp; Adaptive Playback Support</h3>
- *
- * You can check if a decoder supports adaptive playback via {@link
- * MediaCodecInfo.CodecCapabilities#isFeatureSupported}. Adaptive playback
- * is only supported if you configure the codec to decode onto a {@link
- * android.view.Surface}.
- *
- * <h4>For decoders that do not support adaptive playback (including
- * when not decoding onto a Surface)</h4>
- *
- * In order to start decoding data that's not adjacent to previously submitted
- * data (i.e. after a seek) <em>one must</em> {@link #flush} the decoder.
- * Any input or output buffers the client may own at the point of the flush are
- * immediately revoked, i.e. after a call to {@link #flush} the client does not
- * own any buffers anymore.
- * <p>
- * It is important that the input data after a flush starts at a suitable
- * stream boundary. The first frame must be able to be decoded completely on
- * its own (for most codecs this means an I-frame), and that no frames should
- * refer to frames before that first new frame.
- * Note that the format of the data submitted after a flush must not change,
- * flush does not support format discontinuities,
- * for this a full {@link #stop}, {@link #configure configure()}, {@link #start}
- * cycle is necessary.
- *
- * <h4>For decoders that support adaptive playback</h4>
- *
- * In order to start decoding data that's not adjacent to previously submitted
- * data (i.e. after a seek) it is <em>not necessary</em> to {@link #flush} the
- * decoder.
- * <p>
- * It is still important that the input data after the discontinuity starts
- * at a suitable stream boundary (e.g. I-frame), and that no new frames refer
- * to frames before the first frame of the new input data segment.
- * <p>
- * For some video formats it is also possible to change the picture size
- * mid-stream. To do this for H.264, the new Sequence Parameter Set (SPS) and
- * Picture Parameter Set (PPS) values must be packaged together with an
- * Instantaneous Decoder Refresh (IDR) frame in a single buffer, which then
- * can be enqueued as a regular input buffer.
- * The client will receive an {@link #INFO_OUTPUT_FORMAT_CHANGED} return
- * value from {@link #dequeueOutputBuffer dequeueOutputBuffer()} or
- * {@link Callback#onOutputBufferAvailable onOutputBufferAvailable()}
- * just after the picture-size change takes place and before any
- * frames with the new size have been returned.
- * <p>
- * Be careful when calling {@link #flush} shortly after you have changed
- * the picture size. If you have not received confirmation of the picture
- * size change, you will need to repeat the request for the new picture size.
- * E.g. for H.264 you will need to prepend the PPS/SPS to the new IDR
- * frame to ensure that the codec receives the picture size change request.
- *
- * <h3>States and error handling</h3>
- *
- * <p> During its life, a codec conceptually exists in one of the following states:
- * Initialized, Configured, Executing, Error, Uninitialized, (omitting transitory states
- * between them). When created by one of the factory methods,
- * the codec is in the Initialized state; {@link #configure} brings it to the
- * Configured state; {@link #start} brings it to the Executing state.
- * In the Executing state, decoding or encoding occurs through the buffer queue
- * manipulation described above. The method {@link #stop}
- * returns the codec to the Initialized state, whereupon it may be configured again,
- * and {@link #release} brings the codec to the terminal Uninitialized state. When
- * a codec error occurs, the codec moves to the Error state. Use {@link #reset} to
- * bring the codec back to the Initialized state, or {@link #release} to move it
- * to the Uninitialized state.
- *
- * <p> The factory methods
- * {@link #createByCodecName},
- * {@link #createDecoderByType},
- * and {@link #createEncoderByType}
- * throw {@link java.io.IOException} on failure which
- * the caller must catch or declare to pass up.
- * MediaCodec methods throw {@link java.lang.IllegalStateException}
- * when the method is called from a codec state that does not allow it;
- * this is typically due to incorrect application API usage.
- * Methods involving secure buffers may throw
- * {@link MediaCodec.CryptoException#MediaCodec.CryptoException}, which
- * has further error information obtainable from {@link MediaCodec.CryptoException#getErrorCode}.
- *
- * <p> Internal codec errors result in a {@link MediaCodec.CodecException},
- * which may be due to media content corruption, hardware failure, resource exhaustion,
- * and so forth, even when the application is correctly using the API.
- * The recommended action when receiving a {@link MediaCodec.CodecException} can be determined by
- * calling {@link MediaCodec.CodecException#isRecoverable} and
- * {@link MediaCodec.CodecException#isTransient}.
- * If {@link MediaCodec.CodecException#isRecoverable} returns true,
- * then a {@link #stop}, {@link #configure}, and {@link #start} can be performed to recover.
- * If {@link MediaCodec.CodecException#isTransient} returns true,
- * then resources are temporarily unavailable and the method may be retried at a later time.
- * If both {@link MediaCodec.CodecException#isRecoverable}
- * and {@link MediaCodec.CodecException#isTransient} return false,
- * then the {@link MediaCodec.CodecException} is fatal and the codec must be
- * {@link #reset reset} or {@link #release released}.
- * Both {@link MediaCodec.CodecException#isRecoverable} and
- * {@link MediaCodec.CodecException#isTransient} do not return true at the same time.
+ MediaCodec class can be used to access low-level media codecs, i.e. encoder/decoder components.
+ It is part of the Android low-level multimedia support infrastructure (normally used together
+ with {@link MediaExtractor}, {@link MediaSync}, {@link MediaMuxer}, {@link MediaCrypto},
+ {@link MediaDrm}, {@link Image}, {@link Surface}, and {@link AudioTrack}.)
+ <p>
+ <center><object style="width: 540px; height: 205px;" type="image/svg+xml"
+ data="../../../images/media/mediacodec_buffers.svg"><img
+ src="../../../images/media/mediacodec_buffers.png" style="width: 540px; height: 205px"
+ alt="MediaCodec buffer flow diagram"></object></center>
+ <p>
+ In broad terms, a codec processes input data to generate output data. It processes data
+ asynchronously and uses a set of input and output buffers. At a simplistic level, you request
+ (or receive) an empty input buffer, fill it up with data and send it to the codec for
+ processing. The codec uses up the data and transforms it into one of its empty output buffers.
+ Finally, you request (or receive) a filled output buffer, consume its contents and release it
+ back to the codec.
+
+ <h3>Data Types</h3>
+ <p>
+ Codecs operate on three kinds of data: compressed data, raw audio data and raw video data.
+ All three kinds of data can be processed using {@link ByteBuffer ByteBuffers}, but you should use
+ a {@link Surface} for raw video data to improve codec performance. Surface uses native video
+ buffers without mapping or copying them to ByteBuffers; thus, it is much more efficient.
+ You normally cannot access the raw video data when using a Surface, but you can use the
+ {@link ImageReader} class to access unsecured decoded (raw) video frames. This may still be more
+ efficient than using ByteBuffers, as some native buffers may be mapped into {@linkplain
+ ByteBuffer#isDirect direct} ByteBuffers. When using ByteBuffer mode, you can access raw video
+ frames using the {@link Image} class and {@link #getInputImage getInput}/{@link #getOutputImage
+ OutputImage(int)}.
+
+ <h4>Compressed Buffers</h4>
+ <p>
+ Input buffers (for decoders) and output buffers (for encoders) contain compressed data according
+ to the {@linkplain MediaFormat#KEY_MIME format's type}. For video types this is a single
+ compressed video frame. For audio data this is normally a single access unit (an encoded audio
+ segment typically containing a few milliseconds of audio as dictated by the format type), but
+ this requirement is slightly relaxed in that a buffer may contain multiple encoded access units
+ of audio. In either case, buffers do not start or end on arbitrary byte boundaries, but rather on
+ frame/access unit boundaries.
+
+ <h4>Raw Audio Buffers</h4>
+ <p>
+ Raw audio buffers contain entire frames of PCM audio data, which is one sample for each channel
+ in channel order. Each sample is a {@linkplain AudioFormat#ENCODING_PCM_16BIT 16-bit signed
+ integer in native byte order}.
+
+ <pre class=prettyprint>
+ short[] getSamplesForChannel(MediaCodec codec, int bufferId, int channelIx) {
+ ByteBuffer outputBuffer = codec.getOutputBuffer(bufferId);
+ MediaFormat format = codec.getOutputFormat(bufferId);
+ ShortBuffer samples = outputBuffer.order(ByteOrder.nativeOrder()).asShortBuffer();
 + int numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ if (channelIx &lt; 0 || channelIx &gt;= numChannels) {
+ return null;
+ }
+ short[] res = new short[samples.remaining() / numChannels];
+ for (int i = 0; i &lt; res.length; ++i) {
+ res[i] = samples.get(i * numChannels + channelIx);
+ }
+ return res;
+ }</pre>
+
+ <h4>Raw Video Buffers</h4>
+ <p>
+ In ByteBuffer mode video buffers are laid out according to their {@linkplain
+ MediaFormat#KEY_COLOR_FORMAT color format}. You can get the supported color formats as an array
+ from {@link #getCodecInfo}{@code .}{@link MediaCodecInfo#getCapabilitiesForType
+ getCapabilitiesForType(&hellip;)}{@code .}{@link CodecCapabilities#colorFormats colorFormats}.
+ Video codecs may support three kinds of color formats:
+ <ul>
+ <li><strong>native raw video format:</strong> This is marked by {@link
+ CodecCapabilities#COLOR_FormatSurface} and it can be used with an input or output Surface.</li>
+ <li><strong>flexible YUV buffers</strong> (such as {@link
+ CodecCapabilities#COLOR_FormatYUV420Flexible}): These can be used with an input/output Surface,
+ as well as in ByteBuffer mode, by using {@link #getInputImage getInput}/{@link #getOutputImage
+ OutputImage(int)}.</li>
+ <li><strong>other, specific formats:</strong> These are normally only supported in ByteBuffer
+ mode. Some color formats are vendor specific. Others are defined in {@link CodecCapabilities}.
+ For color formats that are equivalent to a flexible format, you can still use {@link
+ #getInputImage getInput}/{@link #getOutputImage OutputImage(int)}.</li>
+ </ul>
+ <p>
+ All video codecs support flexible YUV 4:2:0 buffers since {@link
+ android.os.Build.VERSION_CODES#LOLLIPOP_MR1}.
+
+ <h3>States</h3>
+ <p>
+ During its life a codec conceptually exists in one of three states: Stopped, Executing or
+ Released. The Stopped collective state is actually the conglomeration of three states:
+ Uninitialized, Configured and Error, whereas the Executing state conceptually progresses through
+ three sub-states: Flushed, Running and End-of-Stream.
+ <p>
+ <center><object style="width: 516px; height: 353px;" type="image/svg+xml"
+ data="../../../images/media/mediacodec_states.svg"><img
+ src="../../../images/media/mediacodec_states.png" style="width: 519px; height: 356px"
+ alt="MediaCodec state diagram"></object></center>
+ <p>
+ When you create a codec using one of the factory methods, the codec is in the Uninitialized
+ state. First, you need to configure it via {@link #configure configure(&hellip;)}, which brings
+ it to the Configured state, then call {@link #start} to move it to the Executing state. In this
+ state you can process data through the buffer queue manipulation described above.
+ <p>
+ The Executing state has three sub-states: Flushed, Running and End-of-Stream. Immediately after
+ {@link #start} the codec is in the Flushed sub-state, where it holds all the buffers. As soon
+ as the first input buffer is dequeued, the codec moves to the Running sub-state, where it spends
+ most of its life. When you queue an input buffer with the {@linkplain #BUFFER_FLAG_END_OF_STREAM
+ end-of-stream marker}, the codec transitions to the End-of-Stream sub-state. In this state the
+ codec no longer accepts further input buffers, but still generates output buffers until the
+ end-of-stream is reached on the output. You can move back to the Flushed sub-state at any time
+ while in the Executing state using {@link #flush}.
+ <p>
+ Call {@link #stop} to return the codec to the Uninitialized state, whereupon it may be configured
+ again. When you are done using a codec, you must release it by calling {@link #release}.
+ <p>
+ On rare occasions the codec may encounter an error and move to the Error state. This is
+ communicated using an invalid return value from a queuing operation, or sometimes via an
+ exception. Call {@link #reset} to make the codec usable again. You can call it from any state to
+ move the codec back to the Uninitialized state. Otherwise, call {@link #release} to move to the
+ terminal Released state.
+
+ <h3>Creation</h3>
+ <p>
+ Use {@link MediaCodecList} to create a MediaCodec for a specific {@link MediaFormat}. When
+ decoding a file or a stream, you can get the desired format from {@link
+ MediaExtractor#getTrackFormat MediaExtractor.getTrackFormat}. Inject any specific features that
+ you want to add using {@link MediaFormat#setFeatureEnabled MediaFormat.setFeatureEnabled}, then
+ call {@link MediaCodecList#findDecoderForFormat MediaCodecList.findDecoderForFormat} to get the
+ name of a codec that can handle that specific media format. Finally, create the codec using
+ {@link #createByCodecName}.
+ <p class=note>
+ <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP}, the format to
+ {@code MediaCodecList.findDecoder}/{@code EncoderForFormat} must not contain a {@linkplain
+ MediaFormat#KEY_FRAME_RATE frame rate}. Use
+ <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+ to clear any existing frame rate setting in the format.
+ <p>
+ You can also create the preferred codec for a specific MIME type using {@link
+ #createDecoderByType createDecoder}/{@link #createEncoderByType EncoderByType(String)}.
+ This, however, cannot be used to inject features, and may create a codec that cannot handle the
+ specific desired media format.
+
+ <h4>Creating secure decoders</h4>
+ <p>
+ On versions {@link android.os.Build.VERSION_CODES#KITKAT_WATCH} and earlier, secure codecs might
+ not be listed in {@link MediaCodecList}, but may still be available on the system. Secure codecs
+ that exist can be instantiated by name only, by appending {@code ".secure"} to the name of a
+ regular codec (the name of all secure codecs must end in {@code ".secure"}.) {@link
+ #createByCodecName} will throw an {@code IOException} if the codec is not present on the system.
+ <p>
+ From {@link android.os.Build.VERSION_CODES#LOLLIPOP} onwards, you should use the {@link
+ CodecCapabilities#FEATURE_SecurePlayback} feature in the media format to create a secure decoder.
+
+ <h3>Initialization</h3>
+ <p>
+ After creating the codec, you can set a callback using {@link #setCallback setCallback} if you
+ want to process data asynchronously. Then, {@linkplain #configure configure} the codec using the
+ specific media format. This is when you can specify the output {@link Surface} for video
+ producers &ndash; codecs that generate raw video data (e.g. video decoders). This is also when
+ you can set the decryption parameters for secure codecs (see {@link MediaCrypto}). Finally, since
+ some codecs can operate in multiple modes, you must specify whether you want it to work as a
+ decoder or an encoder.
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, you can query the resulting input and
+ output format in the Configured state. You can use this to verify the resulting configuration,
+ e.g. color formats, before starting the codec.
+ <p>
+ If you want to process raw input video buffers natively with a video consumer &ndash; a codec
+ that processes raw video input, such as a video encoder &ndash; create a destination Surface for
+ your input data using {@link #createInputSurface} after configuration. Alternately, set up the
+ codec to use a previously created {@linkplain #createPersistentInputSurface persistent input
+ surface} by calling {@link #setInputSurface}.
+
+ <h4 id=CSD><a name="CSD"></a>Codec-specific Data</h4>
+ <p>
+ Some formats, notably AAC audio and MPEG4, H.264 and H.265 video formats require the actual data
+ to be prefixed by a number of buffers containing setup data, or codec specific data. When
+ processing such compressed formats, this data must be submitted to the codec after {@link
+ #start} and before any frame data. Such data must be marked using the flag {@link
+ #BUFFER_FLAG_CODEC_CONFIG} in a call to {@link #queueInputBuffer queueInputBuffer}.
+ <p>
+ Codec-specific data can also be included in the format passed to {@link #configure configure} in
+ ByteBuffer entries with keys "csd-0", "csd-1", etc. These keys are always included in the track
+ {@link MediaFormat} obtained from the {@link MediaExtractor#getTrackFormat MediaExtractor}.
+ Codec-specific data in the format is automatically submitted to the codec upon {@link #start};
+ you <strong>MUST NOT</strong> submit this data explicitly. If the format did not contain codec
+ specific data, you can choose to submit it using the specified number of buffers in the correct
+ order, according to the format requirements. Alternately, you can concatenate all codec-specific
+ data and submit it as a single codec-config buffer.
+ <p>
+ Android uses the following codec-specific data buffers. These are also required to be set in
+ the track format for proper {@link MediaMuxer} track configuration. Each parameter set and
+ codec-specific-data must start with a start code of {@code "\x00\x00\x00\x01"}.
+ <p>
+ <style>td.NA { background: #ccc; } .mid > tr > td { vertical-align: middle; }</style>
+ <table>
+ <thead>
+ <th>Format</th>
+ <th>CSD buffer #0</th>
+ <th>CSD buffer #1</th>
+ </thead>
+ <tbody class=mid>
+ <tr>
+ <td>AAC</td>
+ <td>Decoder-specific information from ESDS</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>MPEG-4</td>
+ <td>Decoder-specific information from ESDS</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>H.264 AVC</td>
+ <td>SPS (Sequence Parameter Sets)</td>
+ <td>PPS (Picture Parameter Sets)</td>
+ </tr>
+ <tr>
+ <td>H.265 HEVC</td>
+ <td>VPS (Video Parameter Sets) +<br>
+ SPS (Sequence Parameter Sets) +<br>
+ PPS (Picture Parameter Sets)</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ </tbody>
+ </table>
+
+ <p class=note>
+ <strong>Note:</strong> care must be taken if the codec is flushed immediately or shortly
+ after start, before any output buffer or output format change has been returned, as the codec
+ specific data may be lost during the flush. You must resubmit the data using buffers marked with
+ {@link #BUFFER_FLAG_CODEC_CONFIG} after such flush to ensure proper codec operation.
+ <p>
+ Encoders (or codecs that generate compressed data) will create and return the codec specific data
+ before any valid output buffer in output buffers marked with the {@linkplain
+ #BUFFER_FLAG_CODEC_CONFIG codec-config flag}. Buffers containing codec-specific-data have no
+ meaningful timestamps.
+
+ <h3>Data Processing</h3>
+ <p>
+ Each codec maintains a set of input and output buffers that are referred to by a buffer-ID in
+ API calls. After a successful call to {@link #start} the client "owns" neither input nor output
+ buffers. In synchronous mode, call {@link #dequeueInputBuffer dequeueInput}/{@link
+ #dequeueOutputBuffer OutputBuffer(&hellip;)} to obtain (get ownership of) an input or output
+ buffer from the codec. In asynchronous mode, you will automatically receive available buffers via
+ the {@link Callback#onInputBufferAvailable MediaCodec.Callback.onInput}/{@link
+ Callback#onOutputBufferAvailable OutputBufferAvailable(&hellip;)} callbacks.
+ <p>
+ Upon obtaining an input buffer, fill it with data and submit it to the codec using {@link
+ #queueInputBuffer queueInputBuffer} &ndash; or {@link #queueSecureInputBuffer
+ queueSecureInputBuffer} if using decryption. Do not submit multiple input buffers with the same
+ timestamp (unless it is <a href="#CSD">codec-specific data</a> marked as such).
+ <p>
+ The codec in turn will return a read-only output buffer via the {@link
+ Callback#onOutputBufferAvailable onOutputBufferAvailable} callback in asynchronous mode, or in
 + response to a {@link #dequeueOutputBuffer dequeueOutputBuffer} call in synchronous mode. After the
+ output buffer has been processed, call one of the {@link #releaseOutputBuffer
+ releaseOutputBuffer} methods to return the buffer to the codec.
+ <p>
+ While you are not required to resubmit/release buffers immediately to the codec, holding onto
+ input and/or output buffers may stall the codec, and this behavior is device dependent. E.g. it
+ is possible that a codec may hold off on generating output buffers until all outstanding buffers
 + have been released/resubmitted. Therefore, try to hold onto available buffers as little as
+ possible.
+ <p>
+ Depending on the API version, you can process data in three ways:
+ <table>
+ <thead>
+ <tr>
+ <th>Processing Mode</th>
+ <th>API version <= 20<br>Jelly Bean/KitKat</th>
+ <th>API version >= 21<br>Lollipop and later</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td>Synchronous API using buffer arrays</td>
+ <td>Supported</td>
+ <td>Deprecated</td>
+ </tr>
+ <tr>
+ <td>Synchronous API using buffers</td>
+ <td class=NA>Not Available</td>
+ <td>Supported</td>
+ </tr>
+ <tr>
+ <td>Asynchronous API using buffers</td>
+ <td class=NA>Not Available</td>
+ <td>Supported</td>
+ </tr>
+ </tbody>
+ </table>
+
+ <h4>Asynchronous Processing using Buffers</h4>
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, the preferred method is to process data
+ asynchronously by setting a callback before calling {@link #configure configure}. Asynchronous
+ mode changes the state transitions slightly, because you must call {@link #start} after {@link
+ #flush} to transition the codec to the Running sub-state and start receiving input buffers.
+ Similarly, upon an initial call to {@code start} the codec will move directly to the Running
+ sub-state and start passing available input buffers via the callback.
+ <p>
+ <center><object style="width: 516px; height: 353px;" type="image/svg+xml"
+ data="../../../images/media/mediacodec_async_states.svg"><img
+ src="../../../images/media/mediacodec_async_states.png" style="width: 516px; height: 353px"
+ alt="MediaCodec state diagram for asynchronous operation"></object></center>
+ <p>
+ MediaCodec is typically used like this in asynchronous mode:
+ <pre class=prettyprint>
 + MediaCodec codec = MediaCodec.createByCodecName(name);
+ MediaFormat mOutputFormat; // member variable
+ codec.setCallback(new MediaCodec.Callback() {
+ {@literal @Override}
+ void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
+ ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
+ // fill inputBuffer with valid data
+ &hellip;
+ codec.queueInputBuffer(inputBufferId, &hellip;);
+ }
+
+ {@literal @Override}
+ void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, &hellip;) {
+ ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
+ MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
+ // bufferFormat is equivalent to mOutputFormat
+ // outputBuffer is ready to be processed or rendered.
+ &hellip;
+ codec.releaseOutputBuffer(outputBufferId, &hellip;);
+ }
+
+ {@literal @Override}
+ void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
+ // Subsequent data will conform to new format.
+ // Can ignore if using getOutputFormat(outputBufferId)
+ mOutputFormat = format; // option B
+ }
+
+ {@literal @Override}
+ void onError(&hellip;) {
+ &hellip;
+ }
+ });
+ codec.configure(format, &hellip;);
+ mOutputFormat = codec.getOutputFormat(); // option B
+ codec.start();
+ // wait for processing to complete
+ codec.stop();
+ codec.release();</pre>
+
+ <h4>Synchronous Processing using Buffers</h4>
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, you should retrieve input and output
+ buffers using {@link #getInputBuffer getInput}/{@link #getOutputBuffer OutputBuffer(int)} and/or
+ {@link #getInputImage getInput}/{@link #getOutputImage OutputImage(int)} even when using the
+ codec in synchronous mode. This allows certain optimizations by the framework, e.g. when
+ processing dynamic content. This optimization is disabled if you call {@link #getInputBuffers
+ getInput}/{@link #getOutputBuffers OutputBuffers()}.
+
+ <p class=note>
+ <strong>Note:</strong> do not mix the methods of using buffers and buffer arrays at the same
+ time. Specifically, only call {@code getInput}/{@code OutputBuffers} directly after {@link
+ #start} or after having dequeued an output buffer ID with the value of {@link
+ #INFO_OUTPUT_FORMAT_CHANGED}.
+ <p>
+ MediaCodec is typically used like this in synchronous mode:
+ <pre>
 + MediaCodec codec = MediaCodec.createByCodecName(name);
+ codec.configure(format, &hellip;);
+ MediaFormat outputFormat = codec.getOutputFormat(); // option B
+ codec.start();
+ for (;;) {
+ int inputBufferId = codec.dequeueInputBuffer(timeoutUs);
+ if (inputBufferId &gt;= 0) {
+ ByteBuffer inputBuffer = codec.getInputBuffer(&hellip;);
+ // fill inputBuffer with valid data
+ &hellip;
+ codec.queueInputBuffer(inputBufferId, &hellip;);
+ }
+ int outputBufferId = codec.dequeueOutputBuffer(&hellip;);
+ if (outputBufferId &gt;= 0) {
+ ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
+ MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
+ // bufferFormat is identical to outputFormat
+ // outputBuffer is ready to be processed or rendered.
+ &hellip;
+ codec.releaseOutputBuffer(outputBufferId, &hellip;);
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // Subsequent data will conform to new format.
+ // Can ignore if using getOutputFormat(outputBufferId)
+ outputFormat = codec.getOutputFormat(); // option B
+ }
+ }
+ codec.stop();
+ codec.release();</pre>
+
+ <h4>Synchronous Processing using Buffer Arrays (deprecated)</h4>
+ <p>
+ In versions {@link android.os.Build.VERSION_CODES#KITKAT_WATCH} and before, the set of input and
+ output buffers are represented by the {@code ByteBuffer[]} arrays. After a successful call to
+ {@link #start}, retrieve the buffer arrays using {@link #getInputBuffers getInput}/{@link
+ #getOutputBuffers OutputBuffers()}. Use the buffer ID-s as indices into these arrays (when
+ non-negative), as demonstrated in the sample below. Note that there is no inherent correlation
+ between the size of the arrays and the number of input and output buffers used by the system,
+ although the array size provides an upper bound.
+ <pre>
+ MediaCodec codec = MediaCodec.createCodecByName(name);
+ codec.configure(format, &hellip;);
+ codec.start();
+ ByteBuffer[] inputBuffers = codec.getInputBuffers();
+ ByteBuffer[] outputBuffers = codec.getOutputBuffers();
+ for (;;) {
+ int inputBufferId = codec.dequeueInputBuffer(&hellip;);
+ if (inputBufferId &gt;= 0) {
+ // fill inputBuffers[inputBufferId] with valid data
+ &hellip;
+ codec.queueInputBuffer(inputBufferId, &hellip;);
+ }
+ int outputBufferId = codec.dequeueOutputBuffer(&hellip;);
+ if (outputBufferId &gt;= 0) {
+ // outputBuffers[outputBufferId] is ready to be processed or rendered.
+ &hellip;
+ codec.releaseOutputBuffer(outputBufferId, &hellip;);
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffers = codec.getOutputBuffers();
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // Subsequent data will conform to new format.
+ MediaFormat format = codec.getOutputFormat();
+ }
+ }
+ codec.stop();
+ codec.release();</pre>
+
+ <h4>End-of-stream Handling</h4>
+ <p>
+ When you reach the end of the input data, you must signal it to the codec by specifying the
+ {@link #BUFFER_FLAG_END_OF_STREAM} flag in the call to {@link #queueInputBuffer
+ queueInputBuffer}. You can do this on the last valid input buffer, or by submitting an additional
+ empty input buffer with the end-of-stream flag set. If using an empty buffer, the timestamp will
+ be ignored.
+ <p>
+ The codec will continue to return output buffers until it eventually signals the end of the
+ output stream by specifying the same end-of-stream flag in the {@link BufferInfo} set in {@link
+ #dequeueOutputBuffer dequeueOutputBuffer} or returned via {@link Callback#onOutputBufferAvailable
+ onOutputBufferAvailable}. This can be set on the last valid output buffer, or on an empty buffer
+ after the last valid output buffer. The timestamp of such empty buffer should be ignored.
+ <p>
+ Do not submit additional input buffers after signaling the end of the input stream, unless the
+ codec has been flushed, or stopped and restarted.
+
+ <h4>Using an Output Surface</h4>
+ <p>
+ The data processing is nearly identical to the ByteBuffer mode when using an output {@link
+ Surface}; however, the output buffers will not be accessible, and are represented as {@code null}
+ values. E.g. {@link #getOutputBuffer getOutputBuffer}/{@link #getOutputImage Image(int)} will
+ return {@code null} and {@link #getOutputBuffers} will return an array containing only {@code
+ null}-s.
+ <p>
+ When using an output Surface, you can select whether or not to render each output buffer on the
+ surface. You have three choices:
+ <ul>
+ <li><strong>Do not render the buffer:</strong> Call {@link #releaseOutputBuffer(int, boolean)
+ releaseOutputBuffer(bufferId, false)}.</li>
+ <li><strong>Render the buffer with the default timestamp:</strong> Call {@link
+ #releaseOutputBuffer(int, boolean) releaseOutputBuffer(bufferId, true)}.</li>
+ <li><strong>Render the buffer with a specific timestamp:</strong> Call {@link
+ #releaseOutputBuffer(int, long) releaseOutputBuffer(bufferId, timestamp)}.</li>
+ </ul>
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#MNC}, the default timestamp is the {@linkplain
+ BufferInfo#presentationTimeUs presentation timestamp} of the buffer (converted to nanoseconds).
+ It was not defined prior to that.
+ <p>
+ Also since {@link android.os.Build.VERSION_CODES#MNC}, you can change the output Surface
+ dynamically using {@link #setOutputSurface setOutputSurface}.
+
+ <h4>Using an Input Surface</h4>
+ <p>
+ When using an input Surface, there are no accessible input buffers, as buffers are automatically
+ passed from the input surface to the codec. Calling {@link #dequeueInputBuffer
+ dequeueInputBuffer} will throw an {@code IllegalStateException}, and {@link #getInputBuffers}
+ returns a bogus {@code ByteBuffer[]} array that <strong>MUST NOT</strong> be written into.
+ <p>
+ Call {@link #signalEndOfInputStream} to signal end-of-stream. The input surface will stop
+ submitting data to the codec immediately after this call.
+ <p>
+
+ <h3>Seeking &amp; Adaptive Playback Support</h3>
+ <p>
+ Video decoders (and in general codecs that consume compressed video data) behave differently
+ regarding seek and format change whether or not they support and are configured for adaptive
+ playback. You can check if a decoder supports {@linkplain
+ CodecCapabilities#FEATURE_AdaptivePlayback adaptive playback} via {@link
+ CodecCapabilities#isFeatureSupported CodecCapabilities.isFeatureSupported(String)}. Adaptive
+ playback support for video decoders is only activated if you configure the codec to decode onto a
+ {@link Surface}.
+
+ <h4 id=KeyFrames><a name="KeyFrames"></a>Stream Boundary and Key Frames</h4>
+ <p>
+ It is important that the input data after {@link #start} or {@link #flush} starts at a suitable
+ stream boundary: the first frame must be a key frame. A <em>key frame</em> can be decoded
+ completely on its own (for most codecs this means an I-frame), and no frames that are to be
+ displayed after a key frame refer to frames before the key frame.
+ <p>
+ The following table summarizes suitable key frames for various video formats.
+ <table>
+ <thead>
+ <tr>
+ <th>Format</th>
+ <th>Suitable key frame</th>
+ </tr>
+ </thead>
+ <tbody class=mid>
+ <tr>
+ <td>VP9/VP8</td>
+ <td>a suitable intraframe where no subsequent frames refer to frames prior to this frame.<br>
+ <i>(There is no specific name for such key frame.)</i></td>
+ </tr>
+ <tr>
+ <td>H.265 HEVC</td>
+ <td>IDR or CRA</td>
+ </tr>
+ <tr>
+ <td>H.264 AVC</td>
+ <td>IDR</td>
+ </tr>
+ <tr>
+ <td>MPEG-4<br>H.263<br>MPEG-2</td>
+ <td>a suitable I-frame where no subsequent frames refer to frames prior to this frame.<br>
+ <i>(There is no specific name for such key frame.)</i></td>
+ </tr>
+ </tbody>
+ </table>
+
+ <h4>For decoders that do not support adaptive playback (including when not decoding onto a
+ Surface)</h4>
+ <p>
+ In order to start decoding data that is not adjacent to previously submitted data (i.e. after a
+ seek) you <strong>MUST</strong> flush the decoder. Since all output buffers are immediately
+ revoked at the point of the flush, you may want to first signal then wait for the end-of-stream
+ before you call {@code flush}. It is important that the input data after a flush starts at a
+ suitable stream boundary/key frame.
+ <p class=note>
+ <strong>Note:</strong> the format of the data submitted after a flush must not change; {@link
+ #flush} does not support format discontinuities; for that, a full {@link #stop} - {@link
+ #configure configure(&hellip;)} - {@link #start} cycle is necessary.
+
+ <p class=note>
+ <strong>Also note:</strong> if you flush the codec too soon after {@link #start} &ndash;
+ generally, before the first output buffer or output format change is received &ndash; you
+ will need to resubmit the codec-specific-data to the codec. See the <a
+ href="#CSD">codec-specific-data section</a> for more info.
+
+ <h4>For decoders that support and are configured for adaptive playback</h4>
+ <p>
+ In order to start decoding data that is not adjacent to previously submitted data (i.e. after a
+ seek) it is <em>not necessary</em> to flush the decoder; however, input data after the
+ discontinuity must start at a suitable stream boundary/key frame.
+ <p>
+ For some video formats - namely H.264, H.265, VP8 and VP9 - it is also possible to change the
+ picture size or configuration mid-stream. To do this you must package the entire new
+ codec-specific configuration data together with the key frame into a single buffer (including
+ any start codes), and submit it as a <strong>regular</strong> input buffer.
+ <p>
+ You will receive an {@link #INFO_OUTPUT_FORMAT_CHANGED} return value from {@link
+ #dequeueOutputBuffer dequeueOutputBuffer} or a {@link Callback#onOutputFormatChanged
+ onOutputFormatChanged} callback just after the picture-size change takes place and before any
+ frames with the new size have been returned.
+ <p class=note>
+ <strong>Note:</strong> just as the case for codec-specific data, be careful when calling
+ {@link #flush} shortly after you have changed the picture size. If you have not received
+ confirmation of the picture size change, you will need to repeat the request for the new picture
+ size.
+
+ <h3>Error handling</h3>
+ <p>
+ The factory methods {@link #createByCodecName createByCodecName} and {@link #createDecoderByType
+ createDecoder}/{@link #createEncoderByType EncoderByType} throw {@code IOException} on failure
+ which you must catch or declare to pass up. MediaCodec methods throw {@code
+ IllegalStateException} when the method is called from a codec state that does not allow it; this
+ is typically due to incorrect application API usage. Methods involving secure buffers may throw
+ {@link CryptoException}, which has further error information obtainable from {@link
+ CryptoException#getErrorCode}.
+ <p>
+ Internal codec errors result in a {@link CodecException}, which may be due to media content
+ corruption, hardware failure, resource exhaustion, and so forth, even when the application is
+ correctly using the API. The recommended action when receiving a {@code CodecException}
+ can be determined by calling {@link CodecException#isRecoverable} and {@link
+ CodecException#isTransient}:
+ <ul>
+ <li><strong>recoverable errors:</strong> If {@code isRecoverable()} returns true, then call
+ {@link #stop}, {@link #configure configure(&hellip;)}, and {@link #start} to recover.</li>
+ <li><strong>transient errors:</strong> If {@code isTransient()} returns true, then resources are
+ temporarily unavailable and the method may be retried at a later time.</li>
+ <li><strong>fatal errors:</strong> If both {@code isRecoverable()} and {@code isTransient()}
+ return false, then the {@code CodecException} is fatal and the codec must be {@linkplain #reset
+ reset} or {@linkplain #release released}.</li>
+ </ul>
+ <p>
+ {@code isRecoverable()} and {@code isTransient()} never both return true at the same time.
+
+ <h2 id=History><a name="History"></a>Valid API Calls and API History</h2>
+ <p>
+ This section summarizes the valid API calls in each state and the API history of the MediaCodec
+ class. For API version numbers, see {@link android.os.Build.VERSION_CODES}.
+
+ <style>
+ .api > tr > th, td { text-align: center; padding: 4px 4px; }
+ .api > tr > th { vertical-align: bottom; }
+ .api > tr > td { vertical-align: middle; }
+ .sml > tr > th, td { text-align: center; padding: 2px 4px; }
+ .fn { text-align: left; }
+ .fn > code > a { font: 14px/19px Roboto Condensed, sans-serif; }
+ .deg45 {
+ white-space: nowrap; background: none; border: none; vertical-align: bottom;
+ width: 30px; height: 83px;
+ }
+ .deg45 > div {
+ transform: skew(-45deg, 0deg) translate(1px, -67px);
+ transform-origin: bottom left 0;
+ width: 30px; height: 20px;
+ }
+ .deg45 > div > div { border: 1px solid #ddd; background: #999; height: 90px; width: 42px; }
+ .deg45 > div > div > div { transform: skew(45deg, 0deg) translate(-55px, 55px) rotate(-45deg); }
+ </style>
+
+ <table align="right" style="width: 0%">
+ <thead>
+ <tr><th>Symbol</th><th>Meaning</th></tr>
+ </thead>
+ <tbody class=sml>
+ <tr><td>&#9679;</td><td>Supported</td></tr>
+ <tr><td>&#8277;</td><td>Semantics changed</td></tr>
+ <tr><td>&#9675;</td><td>Experimental support</td></tr>
+ <tr><td>[ ]</td><td>Deprecated</td></tr>
+ <tr><td>&#9099;</td><td>Restricted to surface input mode</td></tr>
+ <tr><td>&#9094;</td><td>Restricted to surface output mode</td></tr>
+ <tr><td>&#9639;</td><td>Restricted to ByteBuffer input mode</td></tr>
+ <tr><td>&#8617;</td><td>Restricted to synchronous mode</td></tr>
+ <tr><td>&#8644;</td><td>Restricted to asynchronous mode</td></tr>
+ <tr><td>( )</td><td>Can be called, but shouldn't</td></tr>
+ </tbody>
+ </table>
+
+ <table style="width: 100%;">
+ <thead class=api>
+ <tr>
+ <th class=deg45><div><div style="background:#4285f4"><div>Uninitialized</div></div></div></th>
+ <th class=deg45><div><div style="background:#f4b400"><div>Configured</div></div></div></th>
+ <th class=deg45><div><div style="background:#e67c73"><div>Flushed</div></div></div></th>
+ <th class=deg45><div><div style="background:#0f9d58"><div>Running</div></div></div></th>
+ <th class=deg45><div><div style="background:#f7cb4d"><div>End of Stream</div></div></div></th>
+ <th class=deg45><div><div style="background:#db4437"><div>Error</div></div></div></th>
+ <th class=deg45><div><div style="background:#666"><div>Released</div></div></div></th>
+ <th></th>
+ <th colspan="8">SDK Version</th>
+ </tr>
+ <tr>
+ <th colspan="7">State</th>
+ <th>Method</th>
+ <th>16</th>
+ <th>17</th>
+ <th>18</th>
+ <th>19</th>
+ <th>20</th>
+ <th>21</th>
+ <th>22</th>
+ <th>23</th>
+ </tr>
+ </thead>
+ <tbody class=api>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createByCodecName createByCodecName}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createDecoderByType createDecoderByType}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createEncoderByType createEncoderByType}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createPersistentInputSurface createPersistentInputSurface}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #configure configure}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>18+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #createInputSurface createInputSurface}</td>
+ <td></td>
+ <td></td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #dequeueInputBuffer dequeueInputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#8277;&#9639;&#8617;</td>
+ <td>&#9639;&#8617;</td>
+ <td>&#9639;&#8617;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #dequeueOutputBuffer dequeueOutputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;&#8617;</td>
+ <td>&#8617;</td>
+ <td>&#8617;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #flush flush}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>-</td>
+ <td class=fn>{@link #getCodecInfo getCodecInfo}</td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>(21+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputBuffer getInputBuffer}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputBuffers getInputBuffers}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>[&#8277;&#8617;]</td>
+ <td>[&#8617;]</td>
+ <td>[&#8617;]</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>21+</td>
+ <td>(21+)</td>
+ <td>(21+)</td>
+ <td>(21+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputFormat getInputFormat}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>(21+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputImage getInputImage}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9675;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>-</td>
+ <td class=fn>{@link #getName getName}</td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputBuffer getOutputBuffer}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputBuffers getOutputBuffers}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>[&#8277;&#8617;]</td>
+ <td>[&#8617;]</td>
+ <td>[&#8617;]</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>21+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputFormat()}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputFormat(int)}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputImage getOutputImage}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9675;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #queueInputBuffer queueInputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #queueSecureInputBuffer queueSecureInputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td class=fn>{@link #release release}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #releaseOutputBuffer(int, boolean)}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #releaseOutputBuffer(int, long)}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ </tr>
+ <tr>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td class=fn>{@link #reset reset}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #setCallback(Callback) setCallback}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>{@link #setCallback(Callback, Handler) &#8277;}</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>23+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #setInputSurface setInputSurface}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9099;</td>
+ </tr>
+ <tr>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>(23+)</td>
+ <td>(23+)</td>
+ <td class=fn>{@link #setOnFrameRenderedListener setOnFrameRenderedListener}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9675; &#9094;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #setOutputSurface setOutputSurface}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9094;</td>
+ </tr>
+ <tr>
+ <td>19+</td>
+ <td>19+</td>
+ <td>19+</td>
+ <td>19+</td>
+ <td>19+</td>
+ <td>(19+)</td>
+ <td>-</td>
+ <td class=fn>{@link #setParameters setParameters}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td class=fn>{@link #setVideoScalingMode setVideoScalingMode}</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #signalEndOfInputStream signalEndOfInputStream}</td>
+ <td></td>
+ <td></td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>16+</td>
+ <td>21+(&#8644;)</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #start start}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #stop stop}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ </tbody>
+ </table>
*/
final public class MediaCodec {
/**
@@ -254,7 +1362,7 @@ final public class MediaCodec {
* {@link #BUFFER_FLAG_END_OF_STREAM}.
*/
public void set(
- int newOffset, int newSize, long newTimeUs, int newFlags) {
+ int newOffset, int newSize, long newTimeUs, @BufferFlag int newFlags) {
offset = newOffset;
size = newSize;
presentationTimeUs = newTimeUs;
@@ -294,7 +1402,16 @@ final public class MediaCodec {
* be an empty buffer, whose sole purpose is to carry the end-of-stream
* marker.
*/
+ @BufferFlag
public int flags;
+
+ /** @hide */
+ @NonNull
+ public BufferInfo dup() {
+ BufferInfo copy = new BufferInfo();
+ copy.set(offset, size, presentationTimeUs, flags);
+ return copy;
+ }
};
// The follow flag constants MUST stay in sync with their equivalents
@@ -326,11 +1443,37 @@ final public class MediaCodec {
*/
public static final int BUFFER_FLAG_END_OF_STREAM = 4;
+ /** @hide */
+ @IntDef(
+ flag = true,
+ value = {
+ BUFFER_FLAG_SYNC_FRAME,
+ BUFFER_FLAG_KEY_FRAME,
+ BUFFER_FLAG_CODEC_CONFIG,
+ BUFFER_FLAG_END_OF_STREAM,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface BufferFlag {}
+
+ private static class FrameRenderedInfo {
+ public long mPresentationTimeUs;
+ public long mNanoTime;
+ public FrameRenderedInfo(long presentationTimeUs, long nanoTime) {
+ mPresentationTimeUs = presentationTimeUs;
+ mNanoTime = nanoTime;
+ }
+ }
+
private EventHandler mEventHandler;
+ private EventHandler mOnFrameRenderedHandler;
+ private EventHandler mCallbackHandler;
private Callback mCallback;
+ private OnFrameRenderedListener mOnFrameRenderedListener;
+ private Object mListenerLock = new Object();
private static final int EVENT_CALLBACK = 1;
private static final int EVENT_SET_CALLBACK = 2;
+ private static final int EVENT_FRAME_RENDERED = 3;
private static final int CB_INPUT_AVAILABLE = 1;
private static final int CB_OUTPUT_AVAILABLE = 2;
@@ -340,13 +1483,13 @@ final public class MediaCodec {
private class EventHandler extends Handler {
private MediaCodec mCodec;
- public EventHandler(MediaCodec codec, Looper looper) {
+ public EventHandler(@NonNull MediaCodec codec, @NonNull Looper looper) {
super(looper);
mCodec = codec;
}
@Override
- public void handleMessage(Message msg) {
+ public void handleMessage(@NonNull Message msg) {
switch (msg.what) {
case EVENT_CALLBACK:
{
@@ -358,6 +1501,15 @@ final public class MediaCodec {
mCallback = (MediaCodec.Callback) msg.obj;
break;
}
+ case EVENT_FRAME_RENDERED:
+ synchronized (mListenerLock) {
+ FrameRenderedInfo info = (FrameRenderedInfo)msg.obj;
+ if (mOnFrameRenderedListener != null) {
+ mOnFrameRenderedListener.onFrameRendered(
+ mCodec, info.mPresentationTimeUs, info.mNanoTime);
+ }
+ break;
+ }
default:
{
break;
@@ -365,7 +1517,7 @@ final public class MediaCodec {
}
}
- private void handleCallback(Message msg) {
+ private void handleCallback(@NonNull Message msg) {
if (mCallback == null) {
return;
}
@@ -414,6 +1566,8 @@ final public class MediaCodec {
}
}
+ private boolean mHasSurface = false;
+
/**
* Instantiate a decoder supporting input data of the given mime type.
*
@@ -439,7 +1593,8 @@ final public class MediaCodec {
* @throws IllegalArgumentException if type is not a valid mime type.
* @throws NullPointerException if type is null.
*/
- public static MediaCodec createDecoderByType(String type)
+ @NonNull
+ public static MediaCodec createDecoderByType(@NonNull String type)
throws IOException {
return new MediaCodec(type, true /* nameIsType */, false /* encoder */);
}
@@ -451,7 +1606,8 @@ final public class MediaCodec {
* @throws IllegalArgumentException if type is not a valid mime type.
* @throws NullPointerException if type is null.
*/
- public static MediaCodec createEncoderByType(String type)
+ @NonNull
+ public static MediaCodec createEncoderByType(@NonNull String type)
throws IOException {
return new MediaCodec(type, true /* nameIsType */, true /* encoder */);
}
@@ -465,14 +1621,15 @@ final public class MediaCodec {
* @throws IllegalArgumentException if name is not valid.
* @throws NullPointerException if name is null.
*/
- public static MediaCodec createByCodecName(String name)
+ @NonNull
+ public static MediaCodec createByCodecName(@NonNull String name)
throws IOException {
return new MediaCodec(
name, false /* nameIsType */, false /* unused */);
}
private MediaCodec(
- String name, boolean nameIsType, boolean encoder) {
+ @NonNull String name, boolean nameIsType, boolean encoder) {
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
@@ -481,6 +1638,9 @@ final public class MediaCodec {
} else {
mEventHandler = null;
}
+ mCallbackHandler = mEventHandler;
+ mOnFrameRenderedHandler = mEventHandler;
+
mBufferLock = new Object();
native_setup(name, nameIsType, encoder);
@@ -492,14 +1652,14 @@ final public class MediaCodec {
}
/**
- * Returns the codec to its initial (Initialized) state.
+ * Returns the codec to its initial (Uninitialized) state.
*
* Call this if an {@link MediaCodec.CodecException#isRecoverable unrecoverable}
     * error has occured to reset the codec to its initial state after creation. Note: "occured" should read "occurred".
*
     * @throws CodecException if an unrecoverable error has occurred and the codec
* could not be reset.
- * @throws IllegalStateException if in the Uninitialized state.
+ * @throws IllegalStateException if in the Released state.
*/
public final void reset() {
freeAllTrackedBuffers(); // free buffers first
@@ -525,32 +1685,45 @@ final public class MediaCodec {
*/
public static final int CONFIGURE_FLAG_ENCODE = 1;
+ /** @hide */
+ @IntDef(flag = true, value = { CONFIGURE_FLAG_ENCODE })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ConfigureFlag {}
+
/**
* Configures a component.
*
* @param format The format of the input data (decoder) or the desired
- * format of the output data (encoder).
+ * format of the output data (encoder). Passing {@code null}
+ * as {@code format} is equivalent to passing an
+ * {@link MediaFormat#MediaFormat an empty MediaFormat}.
* @param surface Specify a surface on which to render the output of this
- * decoder.
+ * decoder. Pass {@code null} as {@code surface} if the
+ * codec does not generate raw video output (e.g. not a video
+ * decoder) and/or if you want to configure the codec for
+ * {@link ByteBuffer} output.
* @param crypto Specify a crypto object to facilitate secure decryption
- * of the media data.
+ * of the media data. Pass {@code null} as {@code crypto} for
+ * non-secure codecs.
* @param flags Specify {@link #CONFIGURE_FLAG_ENCODE} to configure the
* component as an encoder.
* @throws IllegalArgumentException if the surface has been released (or is invalid),
* or the format is unacceptable (e.g. missing a mandatory key),
* or the flags are not set properly
* (e.g. missing {@link #CONFIGURE_FLAG_ENCODE} for an encoder).
- * @throws IllegalStateException if not in the Initialized state.
+ * @throws IllegalStateException if not in the Uninitialized state.
+ * @throws CryptoException upon DRM error.
+ * @throws CodecException upon codec error.
*/
public void configure(
- MediaFormat format,
- Surface surface, MediaCrypto crypto, int flags) {
- Map<String, Object> formatMap = format.getMap();
-
+ @Nullable MediaFormat format,
+ @Nullable Surface surface, @Nullable MediaCrypto crypto,
+ @ConfigureFlag int flags) {
String[] keys = null;
Object[] values = null;
if (format != null) {
+ Map<String, Object> formatMap = format.getMap();
keys = new String[formatMap.size()];
values = new Object[formatMap.size()];
@@ -574,14 +1747,87 @@ final public class MediaCodec {
}
}
+ mHasSurface = surface != null;
+
native_configure(keys, values, surface, crypto, flags);
}
- private native final void native_setCallback(Callback cb);
+ /**
+ * Dynamically sets the output surface of a codec.
+ * <p>
+ * This can only be used if the codec was configured with an output surface. The
+ * new output surface should have a compatible usage type to the original output surface.
+ * E.g. codecs may not support switching from a SurfaceTexture (GPU readable) output
+ * to ImageReader (software readable) output.
+ * @param surface the output surface to use. It must not be {@code null}.
+ * @throws IllegalStateException if the codec does not support setting the output
+ * surface in the current state.
+ * @throws IllegalArgumentException if the new surface is not of a suitable type for the codec.
+ */
+ public void setOutputSurface(@NonNull Surface surface) {
+ if (!mHasSurface) {
+ throw new IllegalStateException("codec was not configured for an output surface");
+ }
+ native_setSurface(surface);
+ }
+
+ private native void native_setSurface(@NonNull Surface surface);
+
+ /**
+ * Create a persistent input surface that can be used with codecs that normally have an input
+ * surface, such as video encoders. A persistent input can be reused by subsequent
+ * {@link MediaCodec} or {@link MediaRecorder} instances, but can only be used by at
+ * most one codec or recorder instance concurrently.
+ * <p>
+ * The application is responsible for calling release() on the Surface when done.
+ *
+ * @return an input surface that can be used with {@link #setInputSurface}.
+ */
+ @NonNull
+ public static Surface createPersistentInputSurface() {
+ return native_createPersistentInputSurface();
+ }
+
+ static class PersistentSurface extends Surface {
+ @SuppressWarnings("unused")
+ PersistentSurface() {} // used by native
+
+ @Override
+ public void release() {
+ native_releasePersistentInputSurface(this);
+ super.release();
+ }
+
+ private long mPersistentObject;
+ };
+
+ /**
+ * Configures the codec (e.g. encoder) to use a persistent input surface in place of input
+ * buffers. This may only be called after {@link #configure} and before {@link #start}, in
+ * lieu of {@link #createInputSurface}.
+ * @param surface a persistent input surface created by {@link #createPersistentInputSurface}
+ * @throws IllegalStateException if not in the Configured state or does not require an input
+ * surface.
+ * @throws IllegalArgumentException if the surface was not created by
+ * {@link #createPersistentInputSurface}.
+ */
+ public void setInputSurface(@NonNull Surface surface) {
+ if (!(surface instanceof PersistentSurface)) {
+ throw new IllegalArgumentException("not a PersistentSurface");
+ }
+ native_setInputSurface(surface);
+ }
+
+ @NonNull
+ private static native final PersistentSurface native_createPersistentInputSurface();
+ private static native final void native_releasePersistentInputSurface(@NonNull Surface surface);
+ private native final void native_setInputSurface(@NonNull Surface surface);
+
+ private native final void native_setCallback(@Nullable Callback cb);
private native final void native_configure(
- String[] keys, Object[] values,
- Surface surface, MediaCrypto crypto, int flags);
+ @Nullable String[] keys, @Nullable Object[] values,
+ @Nullable Surface surface, @Nullable MediaCrypto crypto, @ConfigureFlag int flags);
/**
* Requests a Surface to use as the input to an encoder, in place of input buffers. This
@@ -595,6 +1841,7 @@ final public class MediaCodec {
* unexpected results.
* @throws IllegalStateException if not in the Configured state.
*/
+ @NonNull
public native final Surface createInputSurface();
/**
@@ -622,15 +1869,20 @@ final public class MediaCodec {
* remains active and ready to be {@link #start}ed again.
* To ensure that it is available to other client call {@link #release}
* and don't just rely on garbage collection to eventually do this for you.
- * @throws IllegalStateException if in the Uninitialized state.
+ * @throws IllegalStateException if in the Released state.
*/
public final void stop() {
native_stop();
freeAllTrackedBuffers();
- if (mEventHandler != null) {
- mEventHandler.removeMessages(EVENT_CALLBACK);
- mEventHandler.removeMessages(EVENT_SET_CALLBACK);
+ synchronized (mListenerLock) {
+ if (mCallbackHandler != null) {
+ mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+ mCallbackHandler.removeMessages(EVENT_CALLBACK);
+ }
+ if (mOnFrameRenderedHandler != null) {
+ mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+ }
}
}
@@ -668,7 +1920,7 @@ final public class MediaCodec {
* Thrown when an internal codec error occurs.
*/
public final static class CodecException extends IllegalStateException {
- CodecException(int errorCode, int actionCode, String detailMessage) {
+ CodecException(int errorCode, int actionCode, @Nullable String detailMessage) {
super(detailMessage);
mErrorCode = errorCode;
mActionCode = actionCode;
@@ -698,11 +1950,7 @@ final public class MediaCodec {
}
/**
- * Retrieve the error code associated with a CodecException.
- * This is opaque diagnostic information and may depend on
- * hardware or API level.
- *
- * @hide
+ * Retrieve the error code associated with a CodecException
*/
public int getErrorCode() {
return mErrorCode;
@@ -714,10 +1962,30 @@ final public class MediaCodec {
* since this string will not be localized or generally
* comprehensible to end-users.
*/
- public String getDiagnosticInfo() {
+ public @NonNull String getDiagnosticInfo() {
return mDiagnosticInfo;
}
+ /**
+ * This indicates required resource was not able to be allocated.
+ */
+ public static final int ERROR_INSUFFICIENT_RESOURCE = 1100;
+
+ /**
+ * This indicates the resource manager reclaimed the media resource used by the codec.
+ * <p>
+ * With this exception, the codec must be released, as it has moved to terminal state.
+ */
+ public static final int ERROR_RECLAIMED = 1101;
+
+ /** @hide */
+ @IntDef({
+ ERROR_INSUFFICIENT_RESOURCE,
+ ERROR_RECLAIMED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ReasonCode {}
+
/* Must be in sync with android_media_MediaCodec.cpp */
private final static int ACTION_TRANSIENT = 1;
private final static int ACTION_RECOVERABLE = 2;
@@ -731,15 +1999,15 @@ final public class MediaCodec {
* Thrown when a crypto error occurs while queueing a secure input buffer.
*/
public final static class CryptoException extends RuntimeException {
- public CryptoException(int errorCode, String detailMessage) {
+ public CryptoException(int errorCode, @Nullable String detailMessage) {
super(detailMessage);
mErrorCode = errorCode;
}
/**
- * This indicates that no key has been set to perform the requested
- * decrypt operation. The operation can be retried after adding
- * a decryption key.
+ * This indicates that the requested key was not found when trying to
+ * perform a decrypt operation. The operation can be retried after adding
+ * the correct decryption key.
*/
public static final int ERROR_NO_KEY = 1;
@@ -764,9 +2032,20 @@ final public class MediaCodec {
*/
public static final int ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
+ /** @hide */
+ @IntDef({
+ ERROR_NO_KEY,
+ ERROR_KEY_EXPIRED,
+ ERROR_RESOURCE_BUSY,
+ ERROR_INSUFFICIENT_OUTPUT_PROTECTION,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface CryptoErrorCode {}
+
/**
* Retrieve the error code associated with a CryptoException
*/
+ @CryptoErrorCode
public int getErrorCode() {
return mErrorCode;
}
@@ -860,10 +2139,10 @@ final public class MediaCodec {
public final static class CryptoInfo {
public void set(
int newNumSubSamples,
- int[] newNumBytesOfClearData,
- int[] newNumBytesOfEncryptedData,
- byte[] newKey,
- byte[] newIV,
+ @NonNull int[] newNumBytesOfClearData,
+ @NonNull int[] newNumBytesOfEncryptedData,
+ @NonNull byte[] newKey,
+ @NonNull byte[] newIV,
int newMode) {
numSubSamples = newNumSubSamples;
numBytesOfClearData = newNumBytesOfClearData;
@@ -945,7 +2224,7 @@ final public class MediaCodec {
public final void queueSecureInputBuffer(
int index,
int offset,
- CryptoInfo info,
+ @NonNull CryptoInfo info,
long presentationTimeUs,
int flags) throws CryptoException {
synchronized(mBufferLock) {
@@ -964,7 +2243,7 @@ final public class MediaCodec {
private native final void native_queueSecureInputBuffer(
int index,
int offset,
- CryptoInfo info,
+ @NonNull CryptoInfo info,
long presentationTimeUs,
int flags) throws CryptoException;
@@ -1018,6 +2297,15 @@ final public class MediaCodec {
*/
public static final int INFO_OUTPUT_BUFFERS_CHANGED = -3;
+ /** @hide */
+ @IntDef({
+ INFO_TRY_AGAIN_LATER,
+ INFO_OUTPUT_FORMAT_CHANGED,
+ INFO_OUTPUT_BUFFERS_CHANGED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface OutputBufferInfo {}
+
/**
* Dequeue an output buffer, block at most "timeoutUs" microseconds.
* Returns the index of an output buffer that has been successfully
@@ -1028,21 +2316,25 @@ final public class MediaCodec {
* or codec is configured in asynchronous mode.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @OutputBufferInfo
public final int dequeueOutputBuffer(
- BufferInfo info, long timeoutUs) {
+ @NonNull BufferInfo info, long timeoutUs) {
int res = native_dequeueOutputBuffer(info, timeoutUs);
synchronized(mBufferLock) {
if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
cacheBuffers(false /* input */);
} else if (res >= 0) {
validateOutputByteBuffer(mCachedOutputBuffers, res, info);
+ if (mHasSurface) {
+ mDequeuedOutputInfos.put(res, info.dup());
+ }
}
}
return res;
}
private native final int native_dequeueOutputBuffer(
- BufferInfo info, long timeoutUs);
+ @NonNull BufferInfo info, long timeoutUs);
/**
* If you are done with a buffer, use this call to return the buffer to
@@ -1062,13 +2354,34 @@ final public class MediaCodec {
* @throws MediaCodec.CodecException upon codec error.
*/
public final void releaseOutputBuffer(int index, boolean render) {
+ BufferInfo info = null;
synchronized(mBufferLock) {
invalidateByteBuffer(mCachedOutputBuffers, index);
mDequeuedOutputBuffers.remove(index);
+ if (mHasSurface) {
+ info = mDequeuedOutputInfos.remove(index);
+ }
}
+ // TODO
+ // until codec and libgui supports callback, assume frame is rendered within 50 ms
+ postRenderedCallback(render, info, 50 /* delayMs */);
releaseOutputBuffer(index, render, false /* updatePTS */, 0 /* dummy */);
}
+ private void postRenderedCallback(boolean render, @Nullable BufferInfo info, long delayMs) {
+ if (render && info != null) {
+ synchronized (mListenerLock) {
+ if (mOnFrameRenderedListener != null) {
+ FrameRenderedInfo obj = new FrameRenderedInfo(
+ info.presentationTimeUs, System.nanoTime() + delayMs * 1000000);
+ Message msg = mOnFrameRenderedHandler.obtainMessage(
+ EVENT_FRAME_RENDERED, obj);
+ mOnFrameRenderedHandler.sendMessageDelayed(msg, delayMs);
+ }
+ }
+ }
+ }
+
/**
* If you are done with a buffer, use this call to update its surface timestamp
* and return it to the codec to render it on the output surface. If you
@@ -1119,10 +2432,20 @@ final public class MediaCodec {
* @throws MediaCodec.CodecException upon codec error.
*/
public final void releaseOutputBuffer(int index, long renderTimestampNs) {
+ BufferInfo info = null;
synchronized(mBufferLock) {
invalidateByteBuffer(mCachedOutputBuffers, index);
mDequeuedOutputBuffers.remove(index);
+ if (mHasSurface) {
+ info = mDequeuedOutputInfos.remove(index);
+ }
}
+ // TODO
+ // until codec and libgui supports callback, assume frame is rendered at the
+ // render time or 16 ms from now, whichever is later.
+ postRenderedCallback(
+ true /* render */, info,
+ Math.max(renderTimestampNs - System.nanoTime(), 16666666) / 1000000);
releaseOutputBuffer(
index, true /* render */, true /* updatePTS */, renderTimestampNs);
}
@@ -1151,6 +2474,7 @@ final public class MediaCodec {
* Configured state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public final MediaFormat getOutputFormat() {
return new MediaFormat(getFormatNative(false /* input */));
}
@@ -1165,6 +2489,7 @@ final public class MediaCodec {
* Configured state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public final MediaFormat getInputFormat() {
return new MediaFormat(getFormatNative(true /* input */));
}
@@ -1178,12 +2503,15 @@ final public class MediaCodec {
* @return the format for the output buffer, or null if the index
* is not a dequeued output buffer.
*/
+ @NonNull
public final MediaFormat getOutputFormat(int index) {
return new MediaFormat(getOutputFormatNative(index));
}
+ @NonNull
private native final Map<String, Object> getFormatNative(boolean input);
+ @NonNull
private native final Map<String, Object> getOutputFormatNative(int index);
// used to track dequeued buffers
@@ -1205,12 +2533,12 @@ final public class MediaCodec {
}
}
- public void setImage(Image image) {
+ public void setImage(@Nullable Image image) {
free();
mImage = image;
}
- public void setByteBuffer(ByteBuffer buffer) {
+ public void setByteBuffer(@Nullable ByteBuffer buffer) {
free();
mByteBuffer = buffer;
}
@@ -1227,7 +2555,7 @@ final public class MediaCodec {
}
}
- public void put(int index, ByteBuffer newBuffer) {
+ public void put(int index, @Nullable ByteBuffer newBuffer) {
CodecBuffer buffer = mMap.get(index);
if (buffer == null) { // likely
buffer = new CodecBuffer();
@@ -1236,7 +2564,7 @@ final public class MediaCodec {
buffer.setByteBuffer(newBuffer);
}
- public void put(int index, Image newImage) {
+ public void put(int index, @Nullable Image newImage) {
CodecBuffer buffer = mMap.get(index);
if (buffer == null) { // likely
buffer = new CodecBuffer();
@@ -1257,10 +2585,12 @@ final public class MediaCodec {
private ByteBuffer[] mCachedOutputBuffers;
private final BufferMap mDequeuedInputBuffers = new BufferMap();
private final BufferMap mDequeuedOutputBuffers = new BufferMap();
+ private final Map<Integer, BufferInfo> mDequeuedOutputInfos =
+ new HashMap<Integer, BufferInfo>();
final private Object mBufferLock;
private final void invalidateByteBuffer(
- ByteBuffer[] buffers, int index) {
+ @Nullable ByteBuffer[] buffers, int index) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
if (buffer != null) {
@@ -1270,7 +2600,7 @@ final public class MediaCodec {
}
private final void validateInputByteBuffer(
- ByteBuffer[] buffers, int index) {
+ @Nullable ByteBuffer[] buffers, int index) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
if (buffer != null) {
@@ -1281,7 +2611,7 @@ final public class MediaCodec {
}
private final void revalidateByteBuffer(
- ByteBuffer[] buffers, int index) {
+ @Nullable ByteBuffer[] buffers, int index) {
synchronized(mBufferLock) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
@@ -1293,7 +2623,7 @@ final public class MediaCodec {
}
private final void validateOutputByteBuffer(
- ByteBuffer[] buffers, int index, BufferInfo info) {
+ @Nullable ByteBuffer[] buffers, int index, @NonNull BufferInfo info) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
if (buffer != null) {
@@ -1303,7 +2633,7 @@ final public class MediaCodec {
}
}
- private final void invalidateByteBuffers(ByteBuffer[] buffers) {
+ private final void invalidateByteBuffers(@Nullable ByteBuffer[] buffers) {
if (buffers != null) {
for (ByteBuffer buffer: buffers) {
if (buffer != null) {
@@ -1313,14 +2643,14 @@ final public class MediaCodec {
}
}
- private final void freeByteBuffer(ByteBuffer buffer) {
+ private final void freeByteBuffer(@Nullable ByteBuffer buffer) {
if (buffer != null /* && buffer.isDirect() */) {
// all of our ByteBuffers are direct
java.nio.NioUtils.freeDirectBuffer(buffer);
}
}
- private final void freeByteBuffers(ByteBuffer[] buffers) {
+ private final void freeByteBuffers(@Nullable ByteBuffer[] buffers) {
if (buffers != null) {
for (ByteBuffer buffer: buffers) {
freeByteBuffer(buffer);
@@ -1363,13 +2693,14 @@ final public class MediaCodec {
* @deprecated Use the new {@link #getInputBuffer} method instead
* each time an input buffer is dequeued.
*
- * <b>Note:</b>As of API 21, dequeued input buffers are
+ * <b>Note:</b> As of API 21, dequeued input buffers are
* automatically {@link java.nio.Buffer#clear cleared}.
*
* @throws IllegalStateException if not in the Executing state,
* or codec is configured in asynchronous mode.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public ByteBuffer[] getInputBuffers() {
if (mCachedInputBuffers == null) {
throw new IllegalStateException();
@@ -1390,7 +2721,7 @@ final public class MediaCodec {
* each time an output buffer is dequeued. This method is not
* supported if codec is configured in asynchronous mode.
*
- * <b>Note:</b>As of API 21, the position and limit of output
+ * <b>Note:</b> As of API 21, the position and limit of output
* buffers that are dequeued will be set to the valid data
* range.
*
@@ -1398,6 +2729,7 @@ final public class MediaCodec {
* or codec is configured in asynchronous mode.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public ByteBuffer[] getOutputBuffers() {
if (mCachedOutputBuffers == null) {
throw new IllegalStateException();
@@ -1424,6 +2756,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public ByteBuffer getInputBuffer(int index) {
ByteBuffer newBuffer = getBuffer(true /* input */, index);
synchronized(mBufferLock) {
@@ -1452,6 +2785,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public Image getInputImage(int index) {
Image newImage = getImage(true /* input */, index);
synchronized(mBufferLock) {
@@ -1480,6 +2814,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public ByteBuffer getOutputBuffer(int index) {
ByteBuffer newBuffer = getBuffer(false /* input */, index);
synchronized(mBufferLock) {
@@ -1507,6 +2842,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public Image getOutputImage(int index) {
Image newImage = getImage(false /* input */, index);
synchronized(mBufferLock) {
@@ -1527,19 +2863,28 @@ final public class MediaCodec {
*/
public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
+ /** @hide */
+ @IntDef({
+ VIDEO_SCALING_MODE_SCALE_TO_FIT,
+ VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface VideoScalingMode {}
+
/**
* If a surface has been specified in a previous call to {@link #configure}
* specifies the scaling mode to use. The default is "scale to fit".
* @throws IllegalArgumentException if mode is not recognized.
- * @throws IllegalStateException if in the Uninitialized state.
+ * @throws IllegalStateException if in the Released state.
*/
- public native final void setVideoScalingMode(int mode);
+ public native final void setVideoScalingMode(@VideoScalingMode int mode);
/**
* Get the component name. If the codec was created by createDecoderByType
* or createEncoderByType, what component is chosen is not known beforehand.
- * @throws IllegalStateException if in the Uninitialized state.
+ * @throws IllegalStateException if in the Released state.
*/
+ @NonNull
public native final String getName();
/**
@@ -1567,9 +2912,12 @@ final public class MediaCodec {
/**
* Communicate additional parameter changes to the component instance.
- * @throws IllegalStateException if in the Uninitialized state.
+ * <b>Note:</b> Some of these parameter changes may silently fail to apply.
+ *
+ * @param params The bundle of parameters to set.
+ * @throws IllegalStateException if in the Released state.
*/
- public final void setParameters(Bundle params) {
+ public final void setParameters(@Nullable Bundle params) {
if (params == null) {
return;
}
@@ -1601,22 +2949,124 @@ final public class MediaCodec {
* {@code flush}, you must call {@link #start} to "resume" receiving input buffers,
* even if an input surface was created.
*
- * @param cb The callback that will run.
+ * @param cb The callback that will run. Use {@code null} to clear a previously
+ * set callback (before {@link #configure configure} is called and run
+ * in synchronous mode).
+ * @param handler Callbacks will happen on the handler's thread. If {@code null},
+ * callbacks are done on the default thread (the caller's thread or the
+ * main thread.)
*/
- public void setCallback(/* MediaCodec. */ Callback cb) {
- if (mEventHandler != null) {
- // set java callback on handler
- Message msg = mEventHandler.obtainMessage(EVENT_SET_CALLBACK, 0, 0, cb);
- mEventHandler.sendMessage(msg);
+ public void setCallback(@Nullable /* MediaCodec. */ Callback cb, @Nullable Handler handler) {
+ if (cb != null) {
+ synchronized (mListenerLock) {
+ EventHandler newHandler = getEventHandlerOn(handler, mCallbackHandler);
+ // NOTE: there are no callbacks on the handler at this time, but check anyways
+ // even if we were to extend this to be callable dynamically, it must
+ // be called when codec is flushed, so no messages are pending.
+ if (newHandler != mCallbackHandler) {
+ mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+ mCallbackHandler.removeMessages(EVENT_CALLBACK);
+ mCallbackHandler = newHandler;
+ }
+ }
+ } else if (mCallbackHandler != null) {
+ mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+ mCallbackHandler.removeMessages(EVENT_CALLBACK);
+ }
+
+ if (mCallbackHandler != null) {
+ // set java callback on main handler
+ Message msg = mCallbackHandler.obtainMessage(EVENT_SET_CALLBACK, 0, 0, cb);
+ mCallbackHandler.sendMessage(msg);
// set native handler here, don't post to handler because
- // it may cause the callback to be delayed and set in a wrong state,
- // and MediaCodec is already doing it on looper.
+ // it may cause the callback to be delayed and set in a wrong state.
+ // Note that native codec may start sending events to the callback
+ // handler after this returns.
native_setCallback(cb);
}
}
/**
+ * Sets an asynchronous callback for actionable MediaCodec events on the default
+ * looper.
+ * <p>
+ * Same as {@link #setCallback(Callback, Handler)} with handler set to null.
+ * @param cb The callback that will run. Use {@code null} to clear a previously
+ * set callback (before {@link #configure configure} is called and run
+ * in synchronous mode).
+ * @see #setCallback(Callback, Handler)
+ */
+ public void setCallback(@Nullable /* MediaCodec. */ Callback cb) {
+ setCallback(cb, null /* handler */);
+ }
+
+ /**
+ * Listener to be called when an output frame has rendered on the output surface
+ *
+ * @see MediaCodec#setOnFrameRenderedListener
+ */
+ public interface OnFrameRenderedListener {
+
+ /**
+ * Called when an output frame has rendered on the output surface.
+ *
+ * @param codec the MediaCodec instance
+ * @param presentationTimeUs the presentation time (media time) of the frame rendered.
+ * This is usually the same as specified in {@link #queueInputBuffer}; however,
+ * some codecs may alter the media time by applying some time-based transformation,
+ * such as frame rate conversion. In that case, presentation time corresponds
+ * to the actual output frame rendered.
+ * @param nanoTime The system time when the frame was rendered.
+ *
+ * @see System#nanoTime
+ */
+ public void onFrameRendered(
+ @NonNull MediaCodec codec, long presentationTimeUs, long nanoTime);
+ }
+
+ /**
+ * Register a callback to be invoked when an output frame is rendered on the output surface.
+ * <p>
+ * This method can be called in any codec state, but will only have an effect in the
+ * Executing state for codecs that render buffers to the output surface.
+ *
+ * @param listener the callback that will be run
+ * @param handler the callback will be run on the handler's thread. If {@code null},
+ * the callback will be run on the default thread, which is the looper
+ * from which the codec was created, or a new thread if there was none.
+ */
+ public void setOnFrameRenderedListener(
+ @Nullable OnFrameRenderedListener listener, @Nullable Handler handler) {
+ synchronized (mListenerLock) {
+ mOnFrameRenderedListener = listener;
+ if (listener != null) {
+ EventHandler newHandler = getEventHandlerOn(handler, mOnFrameRenderedHandler);
+ if (newHandler != mOnFrameRenderedHandler) {
+ mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+ }
+ mOnFrameRenderedHandler = newHandler;
+ } else if (mOnFrameRenderedHandler != null) {
+ mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+ }
+ }
+ }
+
+ private EventHandler getEventHandlerOn(
+ @Nullable Handler handler, @NonNull EventHandler lastHandler) {
+ if (handler == null) {
+ return mEventHandler;
+ } else {
+ Looper looper = handler.getLooper();
+ if (lastHandler.getLooper() == looper) {
+ return lastHandler;
+ } else {
+ return new EventHandler(this, looper);
+ }
+ }
+ }
+
+ /**
* MediaCodec callback interface. Used to notify the user asynchronously
* of various MediaCodec events.
*/
@@ -1627,7 +3077,7 @@ final public class MediaCodec {
* @param codec The MediaCodec object.
* @param index The index of the available input buffer.
*/
- public abstract void onInputBufferAvailable(MediaCodec codec, int index);
+ public abstract void onInputBufferAvailable(@NonNull MediaCodec codec, int index);
/**
* Called when an output buffer becomes available.
@@ -1636,7 +3086,8 @@ final public class MediaCodec {
* @param index The index of the available output buffer.
* @param info Info regarding the available output buffer {@link MediaCodec.BufferInfo}.
*/
- public abstract void onOutputBufferAvailable(MediaCodec codec, int index, BufferInfo info);
+ public abstract void onOutputBufferAvailable(
+ @NonNull MediaCodec codec, int index, @NonNull BufferInfo info);
/**
* Called when the MediaCodec encountered an error
@@ -1644,7 +3095,7 @@ final public class MediaCodec {
* @param codec The MediaCodec object.
* @param e The {@link MediaCodec.CodecException} object describing the error.
*/
- public abstract void onError(MediaCodec codec, CodecException e);
+ public abstract void onError(@NonNull MediaCodec codec, @NonNull CodecException e);
/**
* Called when the output format has changed
@@ -1652,39 +3103,52 @@ final public class MediaCodec {
* @param codec The MediaCodec object.
* @param format The new output format.
*/
- public abstract void onOutputFormatChanged(MediaCodec codec, MediaFormat format);
+ public abstract void onOutputFormatChanged(
+ @NonNull MediaCodec codec, @NonNull MediaFormat format);
}
private void postEventFromNative(
- int what, int arg1, int arg2, Object obj) {
- if (mEventHandler != null) {
- Message msg = mEventHandler.obtainMessage(what, arg1, arg2, obj);
- mEventHandler.sendMessage(msg);
+ int what, int arg1, int arg2, @Nullable Object obj) {
+ synchronized (mListenerLock) {
+ EventHandler handler = mEventHandler;
+ if (what == EVENT_CALLBACK) {
+ handler = mCallbackHandler;
+ } else if (what == EVENT_FRAME_RENDERED) {
+ handler = mOnFrameRenderedHandler;
+ }
+ if (handler != null) {
+ Message msg = handler.obtainMessage(what, arg1, arg2, obj);
+ handler.sendMessage(msg);
+ }
}
}
- private native final void setParameters(String[] keys, Object[] values);
+ private native final void setParameters(@NonNull String[] keys, @NonNull Object[] values);
/**
* Get the codec info. If the codec was created by createDecoderByType
* or createEncoderByType, what component is chosen is not known beforehand,
* and thus the caller does not have the MediaCodecInfo.
- * @throws IllegalStateException if in the Uninitialized state.
+ * @throws IllegalStateException if in the Released state.
*/
+ @NonNull
public MediaCodecInfo getCodecInfo() {
return MediaCodecList.getInfoFor(getName());
}
+ @NonNull
private native final ByteBuffer[] getBuffers(boolean input);
+ @Nullable
private native final ByteBuffer getBuffer(boolean input, int index);
+ @Nullable
private native final Image getImage(boolean input, int index);
private static native final void native_init();
private native final void native_setup(
- String name, boolean nameIsType, boolean encoder);
+ @NonNull String name, boolean nameIsType, boolean encoder);
private native final void native_finalize();
@@ -1698,7 +3162,6 @@ final public class MediaCodec {
/** @hide */
public static class MediaImage extends Image {
private final boolean mIsReadOnly;
- private boolean mIsValid;
private final int mWidth;
private final int mHeight;
private final int mFormat;
@@ -1711,35 +3174,42 @@ final public class MediaCodec {
private final static int TYPE_YUV = 1;
+ @Override
public int getFormat() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mFormat;
}
+ @Override
public int getHeight() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mHeight;
}
+ @Override
public int getWidth() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mWidth;
}
+ @Override
public long getTimestamp() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mTimestamp;
}
+ @Override
+ @NonNull
public Plane[] getPlanes() {
- checkValid();
+ throwISEIfImageIsInvalid();
return Arrays.copyOf(mPlanes, mPlanes.length);
}
+ @Override
public void close() {
- if (mIsValid) {
+ if (mIsImageValid) {
java.nio.NioUtils.freeDirectBuffer(mBuffer);
- mIsValid = false;
+ mIsImageValid = false;
}
}
@@ -1749,20 +3219,16 @@ final public class MediaCodec {
* The crop rectangle specifies the region of valid pixels in the image,
* using coordinates in the largest-resolution plane.
*/
- public void setCropRect(Rect cropRect) {
+ @Override
+ public void setCropRect(@Nullable Rect cropRect) {
if (mIsReadOnly) {
throw new ReadOnlyBufferException();
}
super.setCropRect(cropRect);
}
- private void checkValid() {
- if (!mIsValid) {
- throw new IllegalStateException("Image is already released");
- }
- }
- private int readInt(ByteBuffer buffer, boolean asLong) {
+ private int readInt(@NonNull ByteBuffer buffer, boolean asLong) {
if (asLong) {
return (int)buffer.getLong();
} else {
@@ -1771,11 +3237,11 @@ final public class MediaCodec {
}
public MediaImage(
- ByteBuffer buffer, ByteBuffer info, boolean readOnly,
- long timestamp, int xOffset, int yOffset, Rect cropRect) {
+ @NonNull ByteBuffer buffer, @NonNull ByteBuffer info, boolean readOnly,
+ long timestamp, int xOffset, int yOffset, @Nullable Rect cropRect) {
mFormat = ImageFormat.YUV_420_888;
mTimestamp = timestamp;
- mIsValid = true;
+ mIsImageValid = true;
mIsReadOnly = buffer.isReadOnly();
mBuffer = buffer.duplicate();
@@ -1838,7 +3304,7 @@ final public class MediaCodec {
}
private class MediaPlane extends Plane {
- public MediaPlane(ByteBuffer buffer, int rowInc, int colInc) {
+ public MediaPlane(@NonNull ByteBuffer buffer, int rowInc, int colInc) {
mData = buffer;
mRowInc = rowInc;
mColInc = colInc;
@@ -1846,19 +3312,20 @@ final public class MediaCodec {
@Override
public int getRowStride() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mRowInc;
}
@Override
public int getPixelStride() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mColInc;
}
@Override
+ @NonNull
public ByteBuffer getBuffer() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mData;
}
diff --git a/media/java/android/media/MediaCodecInfo.java b/media/java/android/media/MediaCodecInfo.java
index ca5c9ce..89d419a 100644
--- a/media/java/android/media/MediaCodecInfo.java
+++ b/media/java/android/media/MediaCodecInfo.java
@@ -24,15 +24,12 @@ import android.util.Size;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static android.media.Utils.intersectSortedDistinctRanges;
import static android.media.Utils.sortDistinctRanges;
-import static com.android.internal.util.Preconditions.checkArgumentPositive;
-import static com.android.internal.util.Preconditions.checkNotNull;
/**
* Provides information about a given media codec available on the device. You can
@@ -127,6 +124,8 @@ public final class MediaCodecInfo {
private static final Range<Integer> SIZE_RANGE = Range.create(1, 32768);
private static final Range<Integer> FRAME_RATE_RANGE = Range.create(0, 960);
private static final Range<Integer> BITRATE_RANGE = Range.create(0, 500000000);
+ private static final int DEFAULT_MAX_SUPPORTED_INSTANCES = 32;
+ private static final int MAX_SUPPORTED_INSTANCES_LIMIT = 256;
// found stuff that is not supported by framework (=> this should not happen)
private static final int ERROR_UNRECOGNIZED = (1 << 0);
@@ -150,6 +149,7 @@ public final class MediaCodecInfo {
// CLASSIFICATION
private String mMime;
+ private int mMaxSupportedInstances;
// LEGACY FIELDS
@@ -160,56 +160,274 @@ public final class MediaCodecInfo {
public CodecProfileLevel[] profileLevels; // NOTE this array is modifiable by user
// from OMX_COLOR_FORMATTYPE
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_FormatMonochrome = 1;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format8bitRGB332 = 2;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format12bitRGB444 = 3;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format16bitARGB4444 = 4;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format16bitARGB1555 = 5;
+
+ /**
+ * 16 bits per pixel RGB color format, with 5-bit red & blue and 6-bit green component.
+ * <p>
+ * Using 16-bit little-endian representation, colors stored as Red 15:11, Green 10:5, Blue 4:0.
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | BLUE | GREEN | RED |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 4 5 7 0 2 3 7
+ * bit
+ * </pre>
+ *
+ * This format corresponds to {@link android.graphics.PixelFormat#RGB_565} and
+ * {@link android.graphics.ImageFormat#RGB_565}.
+ */
public static final int COLOR_Format16bitRGB565 = 6;
+ /** @deprecated Use {@link #COLOR_Format16bitRGB565}. */
public static final int COLOR_Format16bitBGR565 = 7;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format18bitRGB666 = 8;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format18bitARGB1665 = 9;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format19bitARGB1666 = 10;
+
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888} or {@link #COLOR_FormatRGBFlexible}. */
public static final int COLOR_Format24bitRGB888 = 11;
+
+ /**
+ * 24 bits per pixel RGB color format, with 8-bit red, green & blue components.
+ * <p>
+ * Using 24-bit little-endian representation, colors stored as Red 7:0, Green 15:8, Blue 23:16.
+ * <pre>
+ * byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----->
+ * +-----------------+-----------------+-----------------+
+ * | RED | GREEN | BLUE |
+ * +-----------------+-----------------+-----------------+
+ * </pre>
+ *
+ * This format corresponds to {@link android.graphics.PixelFormat#RGB_888}, and can also be
+ * represented as a flexible format by {@link #COLOR_FormatRGBFlexible}.
+ */
public static final int COLOR_Format24bitBGR888 = 12;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24bitARGB1887 = 13;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format25bitARGB1888 = 14;
+
+ /**
+ * @deprecated Use {@link #COLOR_Format32bitABGR8888} or {@link #COLOR_FormatRGBAFlexible}.
+ */
public static final int COLOR_Format32bitBGRA8888 = 15;
+ /**
+ * @deprecated Use {@link #COLOR_Format32bitABGR8888} or {@link #COLOR_FormatRGBAFlexible}.
+ */
public static final int COLOR_Format32bitARGB8888 = 16;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV411Planar = 17;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV411PackedPlanar = 18;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420Planar = 19;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420PackedPlanar = 20;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420SemiPlanar = 21;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422Planar = 22;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422PackedPlanar = 23;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422SemiPlanar = 24;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYCbYCr = 25;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYCrYCb = 26;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatCbYCrY = 27;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatCrYCbY = 28;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV444Flexible}. */
public static final int COLOR_FormatYUV444Interleaved = 29;
+
+ /**
+ * SMIA 8-bit Bayer format.
+ * Each byte represents the top 8-bits of a 10-bit signal.
+ */
public static final int COLOR_FormatRawBayer8bit = 30;
+ /**
+ * SMIA 10-bit Bayer format.
+ */
public static final int COLOR_FormatRawBayer10bit = 31;
+
+ /**
+ * SMIA 8-bit compressed Bayer format.
+ * Each byte represents a sample from the 10-bit signal that is compressed into 8-bits
+ * using DPCM/PCM compression, as defined by the SMIA Functional Specification.
+ */
public static final int COLOR_FormatRawBayer8bitcompressed = 32;
+
+ /** @deprecated Use {@link #COLOR_FormatL8}. */
public static final int COLOR_FormatL2 = 33;
+ /** @deprecated Use {@link #COLOR_FormatL8}. */
public static final int COLOR_FormatL4 = 34;
+
+ /**
+ * 8 bits per pixel Y color format.
+ * <p>
+ * Each byte contains a single pixel.
+ * This format corresponds to {@link android.graphics.PixelFormat#L_8}.
+ */
public static final int COLOR_FormatL8 = 35;
+
+ /**
+ * 16 bits per pixel, little-endian Y color format.
+ * <p>
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | Y |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 7 0 7
+ * bit
+ * </pre>
+ */
public static final int COLOR_FormatL16 = 36;
+ /** @deprecated Use {@link #COLOR_FormatL16}. */
public static final int COLOR_FormatL24 = 37;
+
+ /**
+ * 32 bits per pixel, little-endian Y color format.
+ * <p>
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+-----------------+-----------------+-----------------+
+ * | Y |
+ * +-----------------+-----------------+-----------------+-----------------+
+ * 0 7 0 7 0 7 0 7
+ * bit
+ * </pre>
+ *
+ * @deprecated Use {@link #COLOR_FormatL16}.
+ */
public static final int COLOR_FormatL32 = 38;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420PackedSemiPlanar = 39;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422PackedSemiPlanar = 40;
+
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format18BitBGR666 = 41;
+
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24BitARGB6666 = 42;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24BitABGR6666 = 43;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
// COLOR_FormatSurface indicates that the data will be a GraphicBuffer metadata reference.
// In OMX this is called OMX_COLOR_FormatAndroidOpaque.
public static final int COLOR_FormatSurface = 0x7F000789;
- // This corresponds to YUV_420_888 format
+
+ /**
+ * 32 bits per pixel RGBA color format, with 8-bit red, green, blue, and alpha components.
+ * <p>
+ * Using 32-bit little-endian representation, colors stored as Red 7:0, Green 15:8,
+ * Blue 23:16, and Alpha 31:24.
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+-----------------+-----------------+-----------------+
+ * | RED | GREEN | BLUE | ALPHA |
+ * +-----------------+-----------------+-----------------+-----------------+
+ * </pre>
+ *
+ * This corresponds to {@link android.graphics.PixelFormat#RGBA_8888}.
+ */
+ public static final int COLOR_Format32bitABGR8888 = 0x7F00A000;
+
+ /**
+ * Flexible 12 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are subsampled by 2 both horizontally and vertically.
+ * Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_420_888},
+ * and can represent the {@link #COLOR_FormatYUV411Planar},
+ * {@link #COLOR_FormatYUV411PackedPlanar}, {@link #COLOR_FormatYUV420Planar},
+ * {@link #COLOR_FormatYUV420PackedPlanar}, {@link #COLOR_FormatYUV420SemiPlanar}
+ * and {@link #COLOR_FormatYUV420PackedSemiPlanar} formats.
+ *
+ * @see Image#getFormat
+ */
public static final int COLOR_FormatYUV420Flexible = 0x7F420888;
+
+ /**
+ * Flexible 16 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are horizontally subsampled by 2. Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_422_888},
+ * and can represent the {@link #COLOR_FormatYCbYCr}, {@link #COLOR_FormatYCrYCb},
+ * {@link #COLOR_FormatCbYCrY}, {@link #COLOR_FormatCrYCbY},
+ * {@link #COLOR_FormatYUV422Planar}, {@link #COLOR_FormatYUV422PackedPlanar},
+ * {@link #COLOR_FormatYUV422SemiPlanar} and {@link #COLOR_FormatYUV422PackedSemiPlanar}
+ * formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV422Flexible = 0x7F422888;
+
+ /**
+ * Flexible 24 bits per pixel YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are not subsampled. Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_444_888},
+ * and can represent the {@link #COLOR_FormatYUV444Interleaved} format.
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV444Flexible = 0x7F444888;
+
+ /**
+ * Flexible 24 bits per pixel RGB color format with 8-bit red, green and blue
+ * components.
+ * <p>
+ * Use this format with {@link Image}. This format corresponds to
+ * {@link android.graphics.ImageFormat#FLEX_RGB_888}, and can represent
+ * {@link #COLOR_Format24bitBGR888} and {@link #COLOR_Format24bitRGB888} formats.
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatRGBFlexible = 0x7F36B888;
+
+ /**
+ * Flexible 32 bits per pixel RGBA color format with 8-bit red, green, blue, and alpha
+ * components.
+ * <p>
+ * Use this format with {@link Image}. This format corresponds to
+ * {@link android.graphics.ImageFormat#FLEX_RGBA_8888}, and can represent
+ * {@link #COLOR_Format32bitBGRA8888}, {@link #COLOR_Format32bitABGR8888} and
+ * {@link #COLOR_Format32bitARGB8888} formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatRGBAFlexible = 0x7F36A888;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
/**
@@ -368,6 +586,18 @@ public final class MediaCodecInfo {
return mMime;
}
+ /**
+ * Returns the max number of the supported concurrent codec instances.
+ * <p>
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ */
+ public int getMaxSupportedInstances() {
+ return mMaxSupportedInstances;
+ }
+
private boolean isAudio() {
return mAudioCaps != null;
}
@@ -469,6 +699,15 @@ public final class MediaCodecInfo {
mEncoderCaps.setDefaultFormat(mDefaultFormat);
}
+ final Map<String, Object> global = MediaCodecList.getGlobalSettings();
+ mMaxSupportedInstances = Utils.parseIntSafely(
+ global.get("max-supported-instances"), DEFAULT_MAX_SUPPORTED_INSTANCES);
+
+ int maxInstances = Utils.parseIntSafely(
+ map.get("max-supported-instances"), mMaxSupportedInstances);
+ mMaxSupportedInstances =
+ Range.create(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances);
+
for (Feature feat: getValidFeatures()) {
String key = MediaFormat.KEY_FEATURE_ + feat.mName;
Integer yesNo = (Integer)map.get(key);
@@ -880,8 +1119,8 @@ public final class MediaCodecInfo {
(int)(mAspectRatioRange.getUpper().doubleValue() * height));
return range;
} catch (IllegalArgumentException e) {
- // should not be here
- Log.w(TAG, "could not get supported widths for " + height , e);
+ // height is not supported because there are no suitable widths
+ Log.v(TAG, "could not get supported widths for " + height);
throw new IllegalArgumentException("unsupported height");
}
}
@@ -924,8 +1163,8 @@ public final class MediaCodecInfo {
(int)(width / mAspectRatioRange.getLower().doubleValue()));
return range;
} catch (IllegalArgumentException e) {
- // should not be here
- Log.w(TAG, "could not get supported heights for " + width , e);
+ // width is not supported because there are no suitable heights
+ Log.v(TAG, "could not get supported heights for " + width);
throw new IllegalArgumentException("unsupported width");
}
}
@@ -957,6 +1196,27 @@ public final class MediaCodecInfo {
}
/**
+ * Returns the range of achievable video frame rates for a video size.
+ * May return {@code null}, if the codec did not publish any measurement
+ * data.
+ * <p>
+ * This is a performance estimate, based on full-speed decoding
+ * and encoding measurements of common video sizes supported by the codec.
+ *
+ * @param width the width of the video
+ * @param height the height of the video
+ *
+ * @throws IllegalArgumentException if the video size is not supported.
+ */
+ public Range<Double> getAchievableFrameRatesFor(int width, int height) {
+ if (!supports(width, height, null)) {
+ throw new IllegalArgumentException("unsupported size");
+ }
+ // TODO: get this data from the codec
+ return null;
+ }
+
+ /**
* Returns whether a given video size ({@code width} and
* {@code height}) and {@code frameRate} combination is supported.
*/
@@ -1473,6 +1733,72 @@ public final class MediaCodecInfo {
maxBlocks, maxBlocksPerSecond,
16 /* blockWidth */, 16 /* blockHeight */,
1 /* widthAlignment */, 1 /* heightAlignment */);
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG2)) {
+ int maxWidth = 11, maxHeight = 9, maxRate = 15;
+ maxBlocks = 99;
+ maxBlocksPerSecond = 1485;
+ maxBps = 64000;
+ for (CodecProfileLevel profileLevel: profileLevels) {
+ int MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0;
+ boolean supported = true;
+ switch (profileLevel.profile) {
+ case CodecProfileLevel.MPEG2ProfileSimple:
+ switch (profileLevel.level) {
+ case CodecProfileLevel.MPEG2LevelML:
+ FR = 30; W = 45; H = 36; MBPS = 48600; FS = 1620; BR = 15000; break;
+ default:
+ Log.w(TAG, "Unrecognized profile/level "
+ + profileLevel.profile + "/"
+ + profileLevel.level + " for " + mime);
+ errors |= ERROR_UNRECOGNIZED;
+ }
+ break;
+ case CodecProfileLevel.MPEG2ProfileMain:
+ switch (profileLevel.level) {
+ case CodecProfileLevel.MPEG2LevelLL:
+ FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 4000; break;
+ case CodecProfileLevel.MPEG2LevelML:
+ FR = 30; W = 45; H = 36; MBPS = 48600; FS = 1620; BR = 15000; break;
+ case CodecProfileLevel.MPEG2LevelH14:
+ FR = 60; W = 90; H = 68; MBPS = 367200; FS = 6120; BR = 60000; break;
+ case CodecProfileLevel.MPEG2LevelHL:
+ FR = 60; W = 120; H = 68; MBPS = 489600; FS = 8160; BR = 80000; break;
+ default:
+ Log.w(TAG, "Unrecognized profile/level "
+ + profileLevel.profile + "/"
+ + profileLevel.level + " for " + mime);
+ errors |= ERROR_UNRECOGNIZED;
+ }
+ break;
+ case CodecProfileLevel.MPEG2Profile422:
+ case CodecProfileLevel.MPEG2ProfileSNR:
+ case CodecProfileLevel.MPEG2ProfileSpatial:
+ case CodecProfileLevel.MPEG2ProfileHigh:
+ Log.i(TAG, "Unsupported profile "
+ + profileLevel.profile + " for " + mime);
+ errors |= ERROR_UNSUPPORTED;
+ supported = false;
+ break;
+ default:
+ Log.w(TAG, "Unrecognized profile "
+ + profileLevel.profile + " for " + mime);
+ errors |= ERROR_UNRECOGNIZED;
+ }
+ if (supported) {
+ errors &= ~ERROR_NONE_SUPPORTED;
+ }
+ maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond);
+ maxBlocks = Math.max(FS, maxBlocks);
+ maxBps = Math.max(BR * 1000, maxBps);
+ maxWidth = Math.max(W, maxWidth);
+ maxHeight = Math.max(H, maxHeight);
+ maxRate = Math.max(FR, maxRate);
+ }
+ applyMacroBlockLimits(maxWidth, maxHeight,
+ maxBlocks, maxBlocksPerSecond,
+ 16 /* blockWidth */, 16 /* blockHeight */,
+ 1 /* widthAlignment */, 1 /* heightAlignment */);
+ mFrameRateRange = mFrameRateRange.intersect(12, maxRate);
} else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG4)) {
int maxWidth = 11, maxHeight = 9, maxRate = 15;
maxBlocks = 99;
@@ -2083,6 +2409,20 @@ public final class MediaCodecInfo {
public static final int MPEG4Level4a = 0x40;
public static final int MPEG4Level5 = 0x80;
+ // from OMX_VIDEO_MPEG2PROFILETYPE
+ public static final int MPEG2ProfileSimple = 0x00;
+ public static final int MPEG2ProfileMain = 0x01;
+ public static final int MPEG2Profile422 = 0x02;
+ public static final int MPEG2ProfileSNR = 0x03;
+ public static final int MPEG2ProfileSpatial = 0x04;
+ public static final int MPEG2ProfileHigh = 0x05;
+
+ // from OMX_VIDEO_MPEG2LEVELTYPE
+ public static final int MPEG2LevelLL = 0x00;
+ public static final int MPEG2LevelML = 0x01;
+ public static final int MPEG2LevelH14 = 0x02;
+ public static final int MPEG2LevelHL = 0x03;
+
// from OMX_AUDIO_AACPROFILETYPE
public static final int AACObjectMain = 1;
public static final int AACObjectLC = 2;
diff --git a/media/java/android/media/MediaCodecList.java b/media/java/android/media/MediaCodecList.java
index bb848d9..f44e048 100644
--- a/media/java/android/media/MediaCodecList.java
+++ b/media/java/android/media/MediaCodecList.java
@@ -19,10 +19,9 @@ package android.media;
import android.util.Log;
import android.media.MediaCodecInfo;
-import android.media.MediaCodecInfo.CodecCapabilities;
-
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Map;
/**
* Allows you to enumerate available codecs, each specified as a {@link MediaCodecInfo} object,
@@ -63,13 +62,19 @@ final public class MediaCodecList {
return sRegularCodecInfos[index];
}
+ /* package private */ static final Map<String, Object> getGlobalSettings() {
+ return sGlobalSettings;
+ }
+
private static Object sInitLock = new Object();
private static MediaCodecInfo[] sAllCodecInfos;
private static MediaCodecInfo[] sRegularCodecInfos;
+ private static Map<String, Object> sGlobalSettings;
private static final void initCodecList() {
synchronized (sInitLock) {
if (sRegularCodecInfos == null) {
+ sGlobalSettings = native_getGlobalSettings();
int count = native_getCodecCount();
ArrayList<MediaCodecInfo> regulars = new ArrayList<MediaCodecInfo>();
ArrayList<MediaCodecInfo> all = new ArrayList<MediaCodecInfo>();
@@ -114,6 +119,8 @@ final public class MediaCodecList {
/* package private */ static native final MediaCodecInfo.CodecCapabilities
getCodecCapabilities(int index, String type);
+ /* package private */ static native final Map<String, Object> native_getGlobalSettings();
+
/* package private */ static native final int findCodecByName(String codec);
/** @hide */
diff --git a/media/java/android/media/MediaCrypto.java b/media/java/android/media/MediaCrypto.java
index c7c3fc2..474d8b9 100644
--- a/media/java/android/media/MediaCrypto.java
+++ b/media/java/android/media/MediaCrypto.java
@@ -16,6 +16,7 @@
package android.media;
+import android.annotation.NonNull;
import android.media.MediaCryptoException;
import java.util.UUID;
@@ -34,11 +35,12 @@ public final class MediaCrypto {
* this device.
* @param uuid The UUID of the crypto scheme.
*/
- public static final boolean isCryptoSchemeSupported(UUID uuid) {
+ public static final boolean isCryptoSchemeSupported(@NonNull UUID uuid) {
return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid));
}
- private static final byte[] getByteArrayFromUUID(UUID uuid) {
+ @NonNull
+ private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
long msb = uuid.getMostSignificantBits();
long lsb = uuid.getLeastSignificantBits();
@@ -51,7 +53,7 @@ public final class MediaCrypto {
return uuidBytes;
}
- private static final native boolean isCryptoSchemeSupportedNative(byte[] uuid);
+ private static final native boolean isCryptoSchemeSupportedNative(@NonNull byte[] uuid);
/**
* Instantiate a MediaCrypto object using opaque, crypto scheme specific
@@ -59,7 +61,7 @@ public final class MediaCrypto {
* @param uuid The UUID of the crypto scheme.
* @param initData Opaque initialization data specific to the crypto scheme.
*/
- public MediaCrypto(UUID uuid, byte[] initData) throws MediaCryptoException {
+ public MediaCrypto(@NonNull UUID uuid, @NonNull byte[] initData) throws MediaCryptoException {
native_setup(getByteArrayFromUUID(uuid), initData);
}
@@ -68,7 +70,21 @@ public final class MediaCrypto {
* to decode data of the given mime type.
* @param mime The mime type of the media data
*/
- public final native boolean requiresSecureDecoderComponent(String mime);
+ public final native boolean requiresSecureDecoderComponent(@NonNull String mime);
+
+ /**
+ * Associate a MediaDrm session with this MediaCrypto instance. The
+ * MediaDrm session is used to securely load decryption keys for a
+ * crypto scheme. The crypto keys loaded through the MediaDrm session
+ * may be selected for use during the decryption operation performed
+ * by {@link android.media.MediaCodec#queueSecureInputBuffer} by specifying
+ * their key ids in the {@link android.media.MediaCodec.CryptoInfo#key} field.
+ * @param sessionId the MediaDrm sessionId to associate with this
+ * MediaCrypto instance
+ * @throws MediaCryptoException on failure to set the sessionId
+ */
+ public final native void setMediaDrmSession(@NonNull byte[] sessionId)
+ throws MediaCryptoException;
@Override
protected void finalize() {
@@ -78,7 +94,7 @@ public final class MediaCrypto {
public native final void release();
private static native final void native_init();
- private native final void native_setup(byte[] uuid, byte[] initData)
+ private native final void native_setup(@NonNull byte[] uuid, @NonNull byte[] initData)
throws MediaCryptoException;
private native final void native_finalize();
diff --git a/media/java/android/media/MediaCryptoException.java b/media/java/android/media/MediaCryptoException.java
index 44c5222..32ddf47 100644
--- a/media/java/android/media/MediaCryptoException.java
+++ b/media/java/android/media/MediaCryptoException.java
@@ -16,12 +16,14 @@
package android.media;
+import android.annotation.Nullable;
+
/**
- * Exception thrown if MediaCrypto object could not be instantiated for
- * whatever reason.
+ * Exception thrown if MediaCrypto object could not be instantiated or
+ * if unable to perform an operation on the MediaCrypto object.
*/
public final class MediaCryptoException extends Exception {
- public MediaCryptoException(String detailMessage) {
+ public MediaCryptoException(@Nullable String detailMessage) {
super(detailMessage);
}
}
diff --git a/media/java/android/media/MediaDataSource.java b/media/java/android/media/MediaDataSource.java
new file mode 100644
index 0000000..948da0b
--- /dev/null
+++ b/media/java/android/media/MediaDataSource.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * For supplying media data to the framework. Implement this if your app has
+ * special requirements for the way media data is obtained.
+ *
+ * <p class="note">Methods of this interface may be called on multiple different
+ * threads. There will be a thread synchronization point between each call to ensure that
+ * modifications to the state of your MediaDataSource are visible to future calls. This means
+ * you don't need to do your own synchronization unless you're modifying the
+ * MediaDataSource from another thread while it's being used by the framework.</p>
+ */
+public abstract class MediaDataSource implements Closeable {
+ /**
+ * Called to request data from the given position.
+ *
+ * Implementations should write up to {@code size} bytes into
+ * {@code buffer}, and return the number of bytes written.
+ *
+ * Return {@code 0} if size is zero (thus no bytes are read).
+ *
+ * Return {@code -1} to indicate that end of stream is reached.
+ *
+ * @param position the position in the data source to read from.
+ * @param buffer the buffer to read the data into.
+ * @param offset the offset within buffer to read the data into.
+ * @param size the number of bytes to read.
+ * @throws IOException on fatal errors.
+ * @return the number of bytes read, or -1 if the end of stream was reached.
+ */
+ public abstract int readAt(long position, byte[] buffer, int offset, int size)
+ throws IOException;
+
+ /**
+ * Called to get the size of the data source.
+ *
+ * @throws IOException on fatal errors
+ * @return the size of data source in bytes, or -1 if the size is unknown.
+ */
+ public abstract long getSize() throws IOException;
+}
diff --git a/media/java/android/media/MediaDescription.java b/media/java/android/media/MediaDescription.java
index 4399c0d..afc3ca7 100644
--- a/media/java/android/media/MediaDescription.java
+++ b/media/java/android/media/MediaDescription.java
@@ -1,22 +1,11 @@
package android.media;
-import android.annotation.NonNull;
import android.annotation.Nullable;
-import android.content.ContentResolver;
-import android.content.res.Resources;
import android.graphics.Bitmap;
-import android.graphics.Canvas;
-import android.graphics.Paint;
-import android.graphics.Point;
-import android.graphics.RectF;
-import android.graphics.drawable.BitmapDrawable;
-import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
-import android.os.CancellationSignal;
import android.os.Parcel;
import android.os.Parcelable;
-import android.util.Size;
/**
* A simple set of metadata for a media item suitable for display. This can be
@@ -52,9 +41,13 @@ public class MediaDescription implements Parcelable {
* Extras for opaque use by apps/system.
*/
private final Bundle mExtras;
+ /**
+ * A Uri to identify this content.
+ */
+ private final Uri mMediaUri;
private MediaDescription(String mediaId, CharSequence title, CharSequence subtitle,
- CharSequence description, Bitmap icon, Uri iconUri, Bundle extras) {
+ CharSequence description, Bitmap icon, Uri iconUri, Bundle extras, Uri mediaUri) {
mMediaId = mediaId;
mTitle = title;
mSubtitle = subtitle;
@@ -62,6 +55,7 @@ public class MediaDescription implements Parcelable {
mIcon = icon;
mIconUri = iconUri;
mExtras = extras;
+ mMediaUri = mediaUri;
}
private MediaDescription(Parcel in) {
@@ -72,6 +66,7 @@ public class MediaDescription implements Parcelable {
mIcon = in.readParcelable(null);
mIconUri = in.readParcelable(null);
mExtras = in.readBundle();
+ mMediaUri = in.readParcelable(null);
}
/**
@@ -136,6 +131,15 @@ public class MediaDescription implements Parcelable {
return mExtras;
}
+ /**
+ * Returns a Uri representing this content or null.
+ *
+ * @return A media Uri or null.
+ */
+ public @Nullable Uri getMediaUri() {
+ return mMediaUri;
+ }
+
@Override
public int describeContents() {
return 0;
@@ -150,6 +154,7 @@ public class MediaDescription implements Parcelable {
dest.writeParcelable(mIcon, flags);
dest.writeParcelable(mIconUri, flags);
dest.writeBundle(mExtras);
+ dest.writeParcelable(mMediaUri, flags);
}
@Override
@@ -181,6 +186,7 @@ public class MediaDescription implements Parcelable {
private Bitmap mIcon;
private Uri mIconUri;
private Bundle mExtras;
+ private Uri mMediaUri;
/**
* Creates an initially empty builder.
@@ -268,9 +274,20 @@ public class MediaDescription implements Parcelable {
return this;
}
+ /**
+ * Sets the media uri.
+ *
+ * @param mediaUri The content's {@link Uri} for the item or null.
+ * @return this
+ */
+ public Builder setMediaUri(@Nullable Uri mediaUri) {
+ mMediaUri = mediaUri;
+ return this;
+ }
+
public MediaDescription build() {
return new MediaDescription(mMediaId, mTitle, mSubtitle, mDescription, mIcon, mIconUri,
- mExtras);
+ mExtras, mMediaUri);
}
}
}
diff --git a/media/java/android/media/MediaDrm.java b/media/java/android/media/MediaDrm.java
index 78a5abe..9acfee2 100644
--- a/media/java/android/media/MediaDrm.java
+++ b/media/java/android/media/MediaDrm.java
@@ -16,13 +16,18 @@
package android.media;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
-import java.util.UUID;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.UUID;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.StringDef;
import android.annotation.SystemApi;
-import android.os.Binder;
-import android.os.Debug;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
@@ -84,6 +89,10 @@ import android.util.Log;
* encrypted content, the samples returned from the extractor remain encrypted, they
* are only decrypted when the samples are delivered to the decoder.
* <p>
+ * MediaDrm methods throw {@link java.lang.IllegalStateException}
+ * when a method is called on a MediaDrm object that is in an invalid or inoperable
+ * state. This is typically due to incorrect application API usage, but may also
+ * be due to an unrecoverable failure in the DRM plugin or security hardware.
* <a name="Callbacks"></a>
* <h3>Callbacks</h3>
* <p>Applications should register for informational events in order
@@ -96,12 +105,16 @@ import android.util.Log;
*/
public final class MediaDrm {
- private final static String TAG = "MediaDrm";
+ private static final String TAG = "MediaDrm";
private static final String PERMISSION = android.Manifest.permission.ACCESS_DRM_CERTIFICATES;
private EventHandler mEventHandler;
+ private EventHandler mOnKeyStatusChangeEventHandler;
+ private EventHandler mOnExpirationUpdateEventHandler;
private OnEventListener mOnEventListener;
+ private OnKeyStatusChangeListener mOnKeyStatusChangeListener;
+ private OnExpirationUpdateListener mOnExpirationUpdateListener;
private long mNativeContext;
@@ -119,12 +132,20 @@ public final class MediaDrm {
*/
public static final int CERTIFICATE_TYPE_X509 = 1;
+ /** @hide */
+ @IntDef({
+ CERTIFICATE_TYPE_NONE,
+ CERTIFICATE_TYPE_X509,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface CertificateType {}
+
/**
* Query if the given scheme identified by its UUID is supported on
* this device.
* @param uuid The UUID of the crypto scheme.
*/
- public static final boolean isCryptoSchemeSupported(UUID uuid) {
+ public static final boolean isCryptoSchemeSupported(@NonNull UUID uuid) {
return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid), null);
}
@@ -136,11 +157,12 @@ public final class MediaDrm {
* @param mimeType The MIME type of the media container, e.g. "video/mp4"
* or "video/webm"
*/
- public static final boolean isCryptoSchemeSupported(UUID uuid, String mimeType) {
+ public static final boolean isCryptoSchemeSupported(
+ @NonNull UUID uuid, @NonNull String mimeType) {
return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid), mimeType);
}
- private static final byte[] getByteArrayFromUUID(UUID uuid) {
+ private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
long msb = uuid.getMostSignificantBits();
long lsb = uuid.getLeastSignificantBits();
@@ -153,8 +175,8 @@ public final class MediaDrm {
return uuidBytes;
}
- private static final native boolean isCryptoSchemeSupportedNative(byte[] uuid,
- String mimeType);
+ private static final native boolean isCryptoSchemeSupportedNative(
+ @NonNull byte[] uuid, @Nullable String mimeType);
/**
* Instantiate a MediaDrm object
@@ -164,7 +186,7 @@ public final class MediaDrm {
* @throws UnsupportedSchemeException if the device does not support the
* specified scheme UUID
*/
- public MediaDrm(UUID uuid) throws UnsupportedSchemeException {
+ public MediaDrm(@NonNull UUID uuid) throws UnsupportedSchemeException {
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
@@ -193,7 +215,7 @@ public final class MediaDrm {
/**
* @hide
*/
- public MediaDrmStateException(int errorCode, String detailMessage) {
+ public MediaDrmStateException(int errorCode, @Nullable String detailMessage) {
super(detailMessage);
mErrorCode = errorCode;
@@ -219,17 +241,180 @@ public final class MediaDrm {
* since this string will not be localized or generally comprehensible
* to end-users.
*/
+ @NonNull
public String getDiagnosticInfo() {
return mDiagnosticInfo;
}
}
/**
+ * Register a callback to be invoked when a session expiration update
+ * occurs. The app's OnExpirationUpdateListener will be notified
+ * when the expiration time of the keys in the session have changed.
+ * @param listener the callback that will be run, or {@code null} to unregister the
+ * previously registered callback.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} if the listener should be invoked on the calling thread's looper.
+ */
+ public void setOnExpirationUpdateListener(
+ @Nullable OnExpirationUpdateListener listener, @Nullable Handler handler) {
+ if (listener != null) {
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper != null) {
+ if (mEventHandler == null || mEventHandler.getLooper() != looper) {
+ mEventHandler = new EventHandler(this, looper);
+ }
+ }
+ }
+ mOnExpirationUpdateListener = listener;
+ }
+
+ /**
+ * Interface definition for a callback to be invoked when a drm session
+ * expiration update occurs
+ */
+ public interface OnExpirationUpdateListener
+ {
+ /**
+ * Called when a session expiration update occurs, to inform the app
+ * about the change in expiration time
+ *
+ * @param md the MediaDrm object on which the event occurred
+ * @param sessionId the DRM session ID on which the event occurred
+ * @param expirationTime the new expiration time for the keys in the session.
+ * The time is in milliseconds, relative to the Unix epoch. A time of
+ * 0 indicates that the keys never expire.
+ */
+ void onExpirationUpdate(
+ @NonNull MediaDrm md, @NonNull byte[] sessionId, long expirationTime);
+ }
+
+ /**
+ * Register a callback to be invoked when the state of keys in a session
+ * change, e.g. when a license update occurs or when a license expires.
+ *
+ * @param listener the callback that will be run when key status changes, or
+ * {@code null} to unregister the previously registered callback.
+ * @param handler the handler on which the listener should be invoked, or
+ * null if the listener should be invoked on the calling thread's looper.
+ */
+ public void setOnKeyStatusChangeListener(
+ @Nullable OnKeyStatusChangeListener listener, @Nullable Handler handler) {
+ if (listener != null) {
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper != null) {
+ if (mEventHandler == null || mEventHandler.getLooper() != looper) {
+ mEventHandler = new EventHandler(this, looper);
+ }
+ }
+ }
+ mOnKeyStatusChangeListener = listener;
+ }
+
+ /**
+ * Interface definition for a callback to be invoked when the keys in a drm
+ * session change states.
+ */
+ public interface OnKeyStatusChangeListener
+ {
+ /**
+ * Called when the keys in a session change status, such as when the license
+ * is renewed or expires.
+ *
+ * @param md the MediaDrm object on which the event occurred
+ * @param sessionId the DRM session ID on which the event occurred
+ * @param keyInformation a list of {@link MediaDrm.KeyStatus}
+ * instances indicating the status for each key in the session
+ * @param hasNewUsableKey indicates if a key has been added that is usable,
+ * which may trigger an attempt to resume playback on the media stream
+ * if it is currently blocked waiting for a key.
+ */
+ void onKeyStatusChange(
+ @NonNull MediaDrm md, @NonNull byte[] sessionId,
+ @NonNull List<KeyStatus> keyInformation,
+ boolean hasNewUsableKey);
+ }
+
+ /**
+ * Defines the status of a key.
+ * A KeyStatus for each key in a session is provided to the
+ * {@link OnKeyStatusChangeListener#onKeyStatusChange}
+ * listener.
+ */
+ public static final class KeyStatus {
+ private final byte[] mKeyId;
+ private final int mStatusCode;
+
+ /**
+ * The key is currently usable to decrypt media data
+ */
+ public static final int STATUS_USABLE = 0;
+
+ /**
+ * The key is no longer usable to decrypt media data because its
+ * expiration time has passed.
+ */
+ public static final int STATUS_EXPIRED = 1;
+
+ /**
+ * The key is not currently usable to decrypt media data because its
+ * output requirements cannot currently be met.
+ */
+ public static final int STATUS_OUTPUT_NOT_ALLOWED = 2;
+
+ /**
+ * The status of the key is not yet known and is being determined.
+ * The status will be updated with the actual status when it has
+ * been determined.
+ */
+ public static final int STATUS_PENDING = 3;
+
+ /**
+ * The key is not currently usable to decrypt media data because of an
+ * internal error in processing unrelated to input parameters. This error
+ * is not actionable by an app.
+ */
+ public static final int STATUS_INTERNAL_ERROR = 4;
+
+ /** @hide */
+ @IntDef({
+ STATUS_USABLE,
+ STATUS_EXPIRED,
+ STATUS_OUTPUT_NOT_ALLOWED,
+ STATUS_PENDING,
+ STATUS_INTERNAL_ERROR,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface KeyStatusCode {}
+
+ KeyStatus(@NonNull byte[] keyId, @KeyStatusCode int statusCode) {
+ mKeyId = keyId;
+ mStatusCode = statusCode;
+ }
+
+ /**
+ * Returns the status code for the key
+ * @return one of {@link #STATUS_USABLE}, {@link #STATUS_EXPIRED},
+ * {@link #STATUS_OUTPUT_NOT_ALLOWED}, {@link #STATUS_PENDING}
+ * or {@link #STATUS_INTERNAL_ERROR}.
+ */
+ @KeyStatusCode
+ public int getStatusCode() { return mStatusCode; }
+
+ /**
+ * Returns the id for the key
+ */
+ @NonNull
+ public byte[] getKeyId() { return mKeyId; }
+ }
+
+ /**
* Register a callback to be invoked when an event occurs
*
- * @param listener the callback that will be run
+ * @param listener the callback that will be run. Use {@code null} to
+ * stop receiving event callbacks.
*/
- public void setOnEventListener(OnEventListener listener)
+ public void setOnEventListener(@Nullable OnEventListener listener)
{
mOnEventListener = listener;
}
@@ -244,18 +429,25 @@ public final class MediaDrm {
* Called when an event occurs that requires the app to be notified
*
* @param md the MediaDrm object on which the event occurred
- * @param sessionId the DRM session ID on which the event occurred
+ * @param sessionId the DRM session ID on which the event occurred,
+ * or {@code null} if there is no session ID associated with the event.
* @param event indicates the event type
* @param extra an secondary error code
* @param data optional byte array of data that may be associated with the event
*/
- void onEvent(MediaDrm md, byte[] sessionId, int event, int extra, byte[] data);
+ void onEvent(
+ @NonNull MediaDrm md, @Nullable byte[] sessionId,
+ @DrmEvent int event, int extra,
+ @Nullable byte[] data);
}
/**
* This event type indicates that the app needs to request a certificate from
* the provisioning server. The request message data is obtained using
* {@link #getProvisionRequest}
+ *
+ * @deprecated Handle provisioning via {@link android.media.NotProvisionedException}
+ * instead.
*/
public static final int EVENT_PROVISION_REQUIRED = 1;
@@ -277,19 +469,38 @@ public final class MediaDrm {
*/
public static final int EVENT_VENDOR_DEFINED = 4;
+ /**
+ * This event indicates that a session opened by the app has been reclaimed by the resource
+ * manager.
+ */
+ public static final int EVENT_SESSION_RECLAIMED = 5;
+
+ /** @hide */
+ @IntDef({
+ EVENT_PROVISION_REQUIRED,
+ EVENT_KEY_REQUIRED,
+ EVENT_KEY_EXPIRED,
+ EVENT_VENDOR_DEFINED,
+ EVENT_SESSION_RECLAIMED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface DrmEvent {}
+
private static final int DRM_EVENT = 200;
+ private static final int EXPIRATION_UPDATE = 201;
+ private static final int KEY_STATUS_CHANGE = 202;
private class EventHandler extends Handler
{
private MediaDrm mMediaDrm;
- public EventHandler(MediaDrm md, Looper looper) {
+ public EventHandler(@NonNull MediaDrm md, @NonNull Looper looper) {
super(looper);
mMediaDrm = md;
}
@Override
- public void handleMessage(Message msg) {
+ public void handleMessage(@NonNull Message msg) {
if (mMediaDrm.mNativeContext == 0) {
Log.w(TAG, "MediaDrm went away with unhandled events");
return;
@@ -297,8 +508,6 @@ public final class MediaDrm {
switch(msg.what) {
case DRM_EVENT:
- Log.i(TAG, "Drm event (" + msg.arg1 + "," + msg.arg2 + ")");
-
if (mOnEventListener != null) {
if (msg.obj != null && msg.obj instanceof Parcel) {
Parcel parcel = (Parcel)msg.obj;
@@ -310,11 +519,46 @@ public final class MediaDrm {
if (data.length == 0) {
data = null;
}
+
+ Log.i(TAG, "Drm event (" + msg.arg1 + "," + msg.arg2 + ")");
mOnEventListener.onEvent(mMediaDrm, sessionId, msg.arg1, msg.arg2, data);
}
}
return;
+ case KEY_STATUS_CHANGE:
+ if (mOnKeyStatusChangeListener != null) {
+ if (msg.obj != null && msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel)msg.obj;
+ byte[] sessionId = parcel.createByteArray();
+ if (sessionId.length > 0) {
+ List<KeyStatus> keyStatusList = keyStatusListFromParcel(parcel);
+ boolean hasNewUsableKey = (parcel.readInt() != 0);
+
+ Log.i(TAG, "Drm key status changed");
+ mOnKeyStatusChangeListener.onKeyStatusChange(mMediaDrm, sessionId,
+ keyStatusList, hasNewUsableKey);
+ }
+ }
+ }
+ return;
+
+ case EXPIRATION_UPDATE:
+ if (mOnExpirationUpdateListener != null) {
+ if (msg.obj != null && msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel)msg.obj;
+ byte[] sessionId = parcel.createByteArray();
+ if (sessionId.length > 0) {
+ long expirationTime = parcel.readLong();
+
+ Log.i(TAG, "Drm key expiration update: " + expirationTime);
+ mOnExpirationUpdateListener.onExpirationUpdate(mMediaDrm, sessionId,
+ expirationTime);
+ }
+ }
+ }
+ return;
+
default:
Log.e(TAG, "Unknown message type " + msg.what);
return;
@@ -322,22 +566,37 @@ public final class MediaDrm {
}
}
- /*
+ /**
+ * Parse a list of KeyStatus objects from an event parcel
+ */
+ @NonNull
+ private List<KeyStatus> keyStatusListFromParcel(@NonNull Parcel parcel) {
+ int nelems = parcel.readInt();
+ List<KeyStatus> keyStatusList = new ArrayList(nelems);
+ while (nelems-- > 0) {
+ byte[] keyId = parcel.createByteArray();
+ int keyStatusCode = parcel.readInt();
+ keyStatusList.add(new KeyStatus(keyId, keyStatusCode));
+ }
+ return keyStatusList;
+ }
+
+ /**
* This method is called from native code when an event occurs. This method
* just uses the EventHandler system to post the event back to the main app thread.
* We use a weak reference to the original MediaPlayer object so that the native
* code is safe from the object disappearing from underneath it. (This is
* the cookie passed to native_setup().)
*/
- private static void postEventFromNative(Object mediadrm_ref,
- int eventType, int extra, Object obj)
+ private static void postEventFromNative(@NonNull Object mediadrm_ref,
+ int what, int eventType, int extra, @Nullable Object obj)
{
- MediaDrm md = (MediaDrm)((WeakReference)mediadrm_ref).get();
+ MediaDrm md = (MediaDrm)((WeakReference<MediaDrm>)mediadrm_ref).get();
if (md == null) {
return;
}
if (md.mEventHandler != null) {
- Message m = md.mEventHandler.obtainMessage(DRM_EVENT, eventType, extra, obj);
+ Message m = md.mEventHandler.obtainMessage(what, eventType, extra, obj);
md.mEventHandler.sendMessage(m);
}
}
@@ -348,6 +607,7 @@ public final class MediaDrm {
* @throws NotProvisionedException if provisioning is needed
* @throws ResourceBusyException if required resources are in use
*/
+ @NonNull
public native byte[] openSession() throws NotProvisionedException,
ResourceBusyException;
@@ -355,7 +615,7 @@ public final class MediaDrm {
* Close a session on the MediaDrm object that was previously opened
* with {@link #openSession}.
*/
- public native void closeSession(byte[] sessionId);
+ public native void closeSession(@NonNull byte[] sessionId);
/**
* This key request type species that the keys will be for online use, they will
@@ -375,26 +635,87 @@ public final class MediaDrm {
*/
public static final int KEY_TYPE_RELEASE = 3;
+ /** @hide */
+ @IntDef({
+ KEY_TYPE_STREAMING,
+ KEY_TYPE_OFFLINE,
+ KEY_TYPE_RELEASE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface KeyType {}
+
/**
* Contains the opaque data an app uses to request keys from a license server
*/
- public final static class KeyRequest {
+ public static final class KeyRequest {
private byte[] mData;
private String mDefaultUrl;
+ private int mRequestType;
+
+ /**
+ * Key request type is initial license request
+ */
+ public static final int REQUEST_TYPE_INITIAL = 0;
+
+ /**
+ * Key request type is license renewal
+ */
+ public static final int REQUEST_TYPE_RENEWAL = 1;
+
+ /**
+ * Key request type is license release
+ */
+ public static final int REQUEST_TYPE_RELEASE = 2;
+
+ /** @hide */
+ @IntDef({
+ REQUEST_TYPE_INITIAL,
+ REQUEST_TYPE_RENEWAL,
+ REQUEST_TYPE_RELEASE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface RequestType {}
KeyRequest() {}
/**
* Get the opaque message data
*/
- public byte[] getData() { return mData; }
+ @NonNull
+ public byte[] getData() {
+ if (mData == null) {
+ // this should never happen as mData is initialized in
+ // JNI after construction of the KeyRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("KeyRequest is not initialized");
+ }
+ return mData;
+ }
/**
* Get the default URL to use when sending the key request message to a
* server, if known. The app may prefer to use a different license
* server URL from other sources.
+ * This method returns an empty string if the default URL is not known.
*/
- public String getDefaultUrl() { return mDefaultUrl; }
+ @NonNull
+ public String getDefaultUrl() {
+ if (mDefaultUrl == null) {
+ // this should never happen as mDefaultUrl is initialized in
+ // JNI after construction of the KeyRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("KeyRequest is not initialized");
+ }
+ return mDefaultUrl;
+ }
+
+ /**
+ * Get the type of the request
+ * @return one of {@link #REQUEST_TYPE_INITIAL},
+ * {@link #REQUEST_TYPE_RENEWAL} or {@link #REQUEST_TYPE_RELEASE}
+ */
+ @RequestType
+ public int getRequestType() { return mRequestType; }
};
/**
@@ -426,12 +747,15 @@ public final class MediaDrm {
* keys, which are identified by a keySetId.
* @param optionalParameters are included in the key request message to
* allow a client application to provide additional message parameters to the server.
- *
+ * This may be {@code null} if no additional parameters are to be sent.
* @throws NotProvisionedException if reprovisioning is needed, due to a
* problem with the certifcate
*/
- public native KeyRequest getKeyRequest(byte[] scope, byte[] init,
- String mimeType, int keyType, HashMap<String, String> optionalParameters)
+ @NonNull
+ public native KeyRequest getKeyRequest(
+ @NonNull byte[] scope, @Nullable byte[] init,
+ @Nullable String mimeType, @KeyType int keyType,
+ @Nullable HashMap<String, String> optionalParameters)
throws NotProvisionedException;
@@ -453,9 +777,10 @@ public final class MediaDrm {
* reprovisioning is required
* @throws DeniedByServerException if the response indicates that the
* server rejected the request
- * @throws ResourceBusyException if required resources are in use
*/
- public native byte[] provideKeyResponse(byte[] scope, byte[] response)
+ @Nullable
+ public native byte[] provideKeyResponse(
+ @NonNull byte[] scope, @NonNull byte[] response)
throws NotProvisionedException, DeniedByServerException;
@@ -466,14 +791,14 @@ public final class MediaDrm {
* @param sessionId the session ID for the DRM session
* @param keySetId identifies the saved key set to restore
*/
- public native void restoreKeys(byte[] sessionId, byte[] keySetId);
+ public native void restoreKeys(@NonNull byte[] sessionId, @NonNull byte[] keySetId);
/**
* Remove the current keys from a session.
*
* @param sessionId the session ID for the DRM session
*/
- public native void removeKeys(byte[] sessionId);
+ public native void removeKeys(@NonNull byte[] sessionId);
/**
* Request an informative description of the key status for the session. The status is
@@ -484,26 +809,46 @@ public final class MediaDrm {
*
* @param sessionId the session ID for the DRM session
*/
- public native HashMap<String, String> queryKeyStatus(byte[] sessionId);
+ @NonNull
+ public native HashMap<String, String> queryKeyStatus(@NonNull byte[] sessionId);
/**
* Contains the opaque data an app uses to request a certificate from a provisioning
* server
*/
- public final static class ProvisionRequest {
+ public static final class ProvisionRequest {
ProvisionRequest() {}
/**
* Get the opaque message data
*/
- public byte[] getData() { return mData; }
+ @NonNull
+ public byte[] getData() {
+ if (mData == null) {
+ // this should never happen as mData is initialized in
+            // JNI after construction of the ProvisionRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("ProvisionRequest is not initialized");
+ }
+ return mData;
+ }
/**
* Get the default URL to use when sending the provision request
* message to a server, if known. The app may prefer to use a different
* provisioning server URL obtained from other sources.
+ * This method returns an empty string if the default URL is not known.
*/
- public String getDefaultUrl() { return mDefaultUrl; }
+ @NonNull
+ public String getDefaultUrl() {
+ if (mDefaultUrl == null) {
+ // this should never happen as mDefaultUrl is initialized in
+ // JNI after construction of the ProvisionRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("ProvisionRequest is not initialized");
+ }
+ return mDefaultUrl;
+ }
private byte[] mData;
private String mDefaultUrl;
@@ -518,12 +863,14 @@ public final class MediaDrm {
* is returned in ProvisionRequest.data. The recommended URL to deliver the provision
* request to is returned in ProvisionRequest.defaultUrl.
*/
+ @NonNull
public ProvisionRequest getProvisionRequest() {
return getProvisionRequestNative(CERTIFICATE_TYPE_NONE, "");
}
+ @NonNull
private native ProvisionRequest getProvisionRequestNative(int certType,
- String certAuthority);
+ @NonNull String certAuthority);
/**
* After a provision response is received by the app, it is provided to the DRM
@@ -535,12 +882,14 @@ public final class MediaDrm {
* @throws DeniedByServerException if the response indicates that the
* server rejected the request
*/
- public void provideProvisionResponse(byte[] response)
+ public void provideProvisionResponse(@NonNull byte[] response)
throws DeniedByServerException {
provideProvisionResponseNative(response);
}
- private native Certificate provideProvisionResponseNative(byte[] response)
+ @NonNull
+ /* could there be a valid response with 0-sized certificate or key? */
+ private native Certificate provideProvisionResponseNative(@NonNull byte[] response)
throws DeniedByServerException;
/**
@@ -570,6 +919,7 @@ public final class MediaDrm {
* record on the client is only removed after positive confirmation that the server
* received the message using releaseSecureStops().
*/
+ @NonNull
public native List<byte[]> getSecureStops();
/**
@@ -577,7 +927,8 @@ public final class MediaDrm {
*
* @param ssid - The secure stop ID provided by the license server.
*/
- public native byte[] getSecureStop(byte[] ssid);
+ @NonNull
+ public native byte[] getSecureStop(@NonNull byte[] ssid);
/**
* Process the SecureStop server response message ssRelease. After authenticating
@@ -585,7 +936,7 @@ public final class MediaDrm {
*
* @param ssRelease the server response indicating which secure stops to release
*/
- public native void releaseSecureStops(byte[] ssRelease);
+ public native void releaseSecureStops(@NonNull byte[] ssRelease);
/**
* Remove all secure stops without requiring interaction with the server.
@@ -614,6 +965,16 @@ public final class MediaDrm {
*/
public static final String PROPERTY_ALGORITHMS = "algorithms";
+ /** @hide */
+ @StringDef({
+ PROPERTY_VENDOR,
+ PROPERTY_VERSION,
+ PROPERTY_DESCRIPTION,
+ PROPERTY_ALGORITHMS,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface StringProperty {}
+
/**
* Read a DRM engine plugin String property value, given the property name string.
* <p>
@@ -621,51 +982,68 @@ public final class MediaDrm {
* {@link #PROPERTY_VENDOR}, {@link #PROPERTY_VERSION},
* {@link #PROPERTY_DESCRIPTION}, {@link #PROPERTY_ALGORITHMS}
*/
- public native String getPropertyString(String propertyName);
-
+ /* FIXME this throws IllegalStateException for invalid property names */
+ @NonNull
+ public native String getPropertyString(@NonNull @StringProperty String propertyName);
/**
* Byte array property name: the device unique identifier is established during
* device provisioning and provides a means of uniquely identifying each device.
*/
+ /* FIXME this throws IllegalStateException for invalid property names */
public static final String PROPERTY_DEVICE_UNIQUE_ID = "deviceUniqueId";
+ /** @hide */
+ @StringDef({
+ PROPERTY_DEVICE_UNIQUE_ID,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ArrayProperty {}
+
/**
* Read a DRM engine plugin byte array property value, given the property name string.
* <p>
* Standard fields names are {@link #PROPERTY_DEVICE_UNIQUE_ID}
*/
- public native byte[] getPropertyByteArray(String propertyName);
-
+ @NonNull
+ public native byte[] getPropertyByteArray(@ArrayProperty String propertyName);
/**
* Set a DRM engine plugin String property value.
*/
- public native void setPropertyString(String propertyName, String value);
+ public native void setPropertyString(
+ @StringProperty String propertyName, @NonNull String value);
/**
* Set a DRM engine plugin byte array property value.
*/
- public native void setPropertyByteArray(String propertyName, byte[] value);
-
+ public native void setPropertyByteArray(
+ @ArrayProperty String propertyName, @NonNull byte[] value);
- private static final native void setCipherAlgorithmNative(MediaDrm drm, byte[] sessionId,
- String algorithm);
+ private static final native void setCipherAlgorithmNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
- private static final native void setMacAlgorithmNative(MediaDrm drm, byte[] sessionId,
- String algorithm);
+ private static final native void setMacAlgorithmNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
- private static final native byte[] encryptNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] input, byte[] iv);
+ @NonNull
+ private static final native byte[] encryptNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] input, @NonNull byte[] iv);
- private static final native byte[] decryptNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] input, byte[] iv);
+ @NonNull
+ private static final native byte[] decryptNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] input, @NonNull byte[] iv);
- private static final native byte[] signNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] message);
+ @NonNull
+ private static final native byte[] signNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] message);
- private static final native boolean verifyNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] message, byte[] signature);
+ private static final native boolean verifyNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] message, @NonNull byte[] signature);
/**
* In addition to supporting decryption of DASH Common Encrypted Media, the
@@ -694,8 +1072,8 @@ public final class MediaDrm {
private MediaDrm mDrm;
private byte[] mSessionId;
- CryptoSession(MediaDrm drm, byte[] sessionId,
- String cipherAlgorithm, String macAlgorithm)
+ CryptoSession(@NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull String cipherAlgorithm, @NonNull String macAlgorithm)
{
mSessionId = sessionId;
mDrm = drm;
@@ -710,7 +1088,9 @@ public final class MediaDrm {
* @param input the data to encrypt
* @param iv the initialization vector to use for the cipher
*/
- public byte[] encrypt(byte[] keyid, byte[] input, byte[] iv) {
+ @NonNull
+ public byte[] encrypt(
+ @NonNull byte[] keyid, @NonNull byte[] input, @NonNull byte[] iv) {
return encryptNative(mDrm, mSessionId, keyid, input, iv);
}
@@ -721,7 +1101,9 @@ public final class MediaDrm {
* @param input the data to encrypt
* @param iv the initialization vector to use for the cipher
*/
- public byte[] decrypt(byte[] keyid, byte[] input, byte[] iv) {
+ @NonNull
+ public byte[] decrypt(
+ @NonNull byte[] keyid, @NonNull byte[] input, @NonNull byte[] iv) {
return decryptNative(mDrm, mSessionId, keyid, input, iv);
}
@@ -731,7 +1113,8 @@ public final class MediaDrm {
* @param keyid specifies which key to use
* @param message the data for which a signature is to be computed
*/
- public byte[] sign(byte[] keyid, byte[] message) {
+ @NonNull
+ public byte[] sign(@NonNull byte[] keyid, @NonNull byte[] message) {
return signNative(mDrm, mSessionId, keyid, message);
}
@@ -744,7 +1127,8 @@ public final class MediaDrm {
* @param signature the reference signature which will be compared with the
* computed signature
*/
- public boolean verify(byte[] keyid, byte[] message, byte[] signature) {
+ public boolean verify(
+ @NonNull byte[] keyid, @NonNull byte[] message, @NonNull byte[] signature) {
return verifyNative(mDrm, mSessionId, keyid, message, signature);
}
};
@@ -769,8 +1153,9 @@ public final class MediaDrm {
* using the method {@link #getPropertyString} with the property name
* "algorithms".
*/
- public CryptoSession getCryptoSession(byte[] sessionId,
- String cipherAlgorithm, String macAlgorithm)
+ public CryptoSession getCryptoSession(
+ @NonNull byte[] sessionId,
+ @NonNull String cipherAlgorithm, @NonNull String macAlgorithm)
{
return new CryptoSession(this, sessionId, cipherAlgorithm, macAlgorithm);
}
@@ -781,11 +1166,11 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public final static class CertificateRequest {
+ public static final class CertificateRequest {
private byte[] mData;
private String mDefaultUrl;
- CertificateRequest(byte[] data, String defaultUrl) {
+ CertificateRequest(@NonNull byte[] data, @NonNull String defaultUrl) {
mData = data;
mDefaultUrl = defaultUrl;
}
@@ -793,6 +1178,7 @@ public final class MediaDrm {
/**
* Get the opaque message data
*/
+ @NonNull
public byte[] getData() { return mData; }
/**
@@ -800,6 +1186,7 @@ public final class MediaDrm {
* message to a server, if known. The app may prefer to use a different
* certificate server URL obtained from other sources.
*/
+ @NonNull
public String getDefaultUrl() { return mDefaultUrl; }
}
@@ -815,8 +1202,9 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public CertificateRequest getCertificateRequest(int certType,
- String certAuthority)
+ @NonNull
+ public CertificateRequest getCertificateRequest(
+ @CertificateType int certType, @NonNull String certAuthority)
{
ProvisionRequest provisionRequest = getProvisionRequestNative(certType, certAuthority);
return new CertificateRequest(provisionRequest.getData(),
@@ -829,18 +1217,36 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public final static class Certificate {
+ public static final class Certificate {
Certificate() {}
/**
* Get the wrapped private key data
*/
- public byte[] getWrappedPrivateKey() { return mWrappedKey; }
+ @NonNull
+ public byte[] getWrappedPrivateKey() {
+ if (mWrappedKey == null) {
+ // this should never happen as mWrappedKey is initialized in
+            // JNI after construction of the Certificate object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("Cerfificate is not initialized");
+ }
+ return mWrappedKey;
+ }
/**
* Get the PEM-encoded certificate chain
*/
- public byte[] getContent() { return mCertificateData; }
+ @NonNull
+ public byte[] getContent() {
+ if (mCertificateData == null) {
+ // this should never happen as mCertificateData is initialized in
+            // JNI after construction of the Certificate object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("Cerfificate is not initialized");
+ }
+ return mCertificateData;
+ }
private byte[] mWrappedKey;
private byte[] mCertificateData;
@@ -864,13 +1270,16 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public Certificate provideCertificateResponse(byte[] response)
+ @NonNull
+ public Certificate provideCertificateResponse(@NonNull byte[] response)
throws DeniedByServerException {
return provideProvisionResponseNative(response);
}
- private static final native byte[] signRSANative(MediaDrm drm, byte[] sessionId,
- String algorithm, byte[] wrappedKey, byte[] message);
+ @NonNull
+ private static final native byte[] signRSANative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull String algorithm, @NonNull byte[] wrappedKey, @NonNull byte[] message);
/**
* Sign data using an RSA key
@@ -883,8 +1292,10 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public byte[] signRSA(byte[] sessionId, String algorithm,
- byte[] wrappedKey, byte[] message) {
+ @NonNull
+ public byte[] signRSA(
+ @NonNull byte[] sessionId, @NonNull String algorithm,
+ @NonNull byte[] wrappedKey, @NonNull byte[] message) {
return signRSANative(this, sessionId, algorithm, wrappedKey, message);
}
diff --git a/media/java/android/media/MediaExtractor.java b/media/java/android/media/MediaExtractor.java
index b23b540..0bf995f 100644
--- a/media/java/android/media/MediaExtractor.java
+++ b/media/java/android/media/MediaExtractor.java
@@ -16,6 +16,9 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.content.ContentResolver;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
@@ -27,6 +30,8 @@ import android.os.IBinder;
import java.io.FileDescriptor;
import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
@@ -66,23 +71,29 @@ final public class MediaExtractor {
}
/**
- * Sets the DataSource object to be used as the data source for this extractor
- * {@hide}
+ * Sets the data source (MediaDataSource) to use.
+ *
+ * @param dataSource the MediaDataSource for the media you want to extract from
+ *
+ * @throws IllegalArgumentException if dataSource is invalid.
*/
- public native final void setDataSource(DataSource source) throws IOException;
+ public native final void setDataSource(@NonNull MediaDataSource dataSource)
+ throws IOException;
/**
* Sets the data source as a content Uri.
*
* @param context the Context to use when resolving the Uri
* @param uri the Content URI of the data you want to extract from.
- * @param headers the headers to be sent together with the request for the data
+ * @param headers the headers to be sent together with the request for the data.
+ * This can be {@code null} if no specific headers are to be sent with the
+ * request.
*/
public final void setDataSource(
- Context context, Uri uri, Map<String, String> headers)
+ @NonNull Context context, @NonNull Uri uri, @Nullable Map<String, String> headers)
throws IOException {
String scheme = uri.getScheme();
- if(scheme == null || scheme.equals("file")) {
+ if (scheme == null || scheme.equals("file")) {
setDataSource(uri.getPath());
return;
}
@@ -121,9 +132,11 @@ final public class MediaExtractor {
* Sets the data source (file-path or http URL) to use.
*
* @param path the path of the file, or the http URL
- * @param headers the headers associated with the http request for the stream you want to play
+ * @param headers the headers associated with the http request for the stream you want to play.
+ * This can be {@code null} if no specific headers are to be sent with the
+ * request.
*/
- public final void setDataSource(String path, Map<String, String> headers)
+ public final void setDataSource(@NonNull String path, @Nullable Map<String, String> headers)
throws IOException {
String[] keys = null;
String[] values = null;
@@ -148,10 +161,10 @@ final public class MediaExtractor {
}
private native final void nativeSetDataSource(
- IBinder httpServiceBinder,
- String path,
- String[] keys,
- String[] values) throws IOException;
+ @NonNull IBinder httpServiceBinder,
+ @NonNull String path,
+ @Nullable String[] keys,
+ @Nullable String[] values) throws IOException;
/**
* Sets the data source (file-path or http URL) to use.
@@ -165,7 +178,7 @@ final public class MediaExtractor {
* As an alternative, the application could first open the file for reading,
* and then use the file descriptor form {@link #setDataSource(FileDescriptor)}.
*/
- public final void setDataSource(String path) throws IOException {
+ public final void setDataSource(@NonNull String path) throws IOException {
nativeSetDataSource(
MediaHTTPService.createHttpServiceBinderIfNecessary(path),
path,
@@ -179,7 +192,7 @@ final public class MediaExtractor {
*
* @param fd the FileDescriptor for the file you want to extract from.
*/
- public final void setDataSource(FileDescriptor fd) throws IOException {
+ public final void setDataSource(@NonNull FileDescriptor fd) throws IOException {
setDataSource(fd, 0, 0x7ffffffffffffffL);
}
@@ -193,7 +206,7 @@ final public class MediaExtractor {
* @param length the length in bytes of the data to be extracted
*/
public native final void setDataSource(
- FileDescriptor fd, long offset, long length) throws IOException;
+ @NonNull FileDescriptor fd, long offset, long length) throws IOException;
@Override
protected void finalize() {
@@ -216,7 +229,9 @@ final public class MediaExtractor {
* Get the PSSH info if present.
* @return a map of uuid-to-bytes, with the uuid specifying
* the crypto scheme, and the bytes being the data specific to that scheme.
+ * This can be {@code null} if the source does not contain PSSH info.
*/
+ @Nullable
public Map<UUID, byte[]> getPsshInfo() {
Map<UUID, byte[]> psshMap = null;
Map<String, Object> formatMap = getFileFormatNative();
@@ -242,16 +257,19 @@ final public class MediaExtractor {
return psshMap;
}
+ @NonNull
private native Map<String, Object> getFileFormatNative();
/**
* Get the track format at the specified index.
* More detail on the representation can be found at {@link android.media.MediaCodec}
*/
+ @NonNull
public MediaFormat getTrackFormat(int index) {
return new MediaFormat(getTrackFormatNative(index));
}
+ @NonNull
private native Map<String, Object> getTrackFormatNative(int index);
/**
@@ -283,11 +301,20 @@ final public class MediaExtractor {
*/
public static final int SEEK_TO_CLOSEST_SYNC = 2;
+ /** @hide */
+ @IntDef({
+ SEEK_TO_PREVIOUS_SYNC,
+ SEEK_TO_NEXT_SYNC,
+ SEEK_TO_CLOSEST_SYNC,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SeekMode {}
+
/**
* All selected tracks seek near the requested time according to the
* specified mode.
*/
- public native void seekTo(long timeUs, int mode);
+ public native void seekTo(long timeUs, @SeekMode int mode);
/**
* Advance to the next sample. Returns false if no more sample data
@@ -304,7 +331,7 @@ final public class MediaExtractor {
* @param byteBuf the destination byte buffer
* @return the sample size (or -1 if no more samples are available).
*/
- public native int readSampleData(ByteBuffer byteBuf, int offset);
+ public native int readSampleData(@NonNull ByteBuffer byteBuf, int offset);
/**
* Returns the track index the current sample originates from (or -1
@@ -333,9 +360,20 @@ final public class MediaExtractor {
*/
public static final int SAMPLE_FLAG_ENCRYPTED = 2;
+ /** @hide */
+ @IntDef(
+ flag = true,
+ value = {
+ SAMPLE_FLAG_SYNC,
+ SAMPLE_FLAG_ENCRYPTED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SampleFlag {}
+
/**
* Returns the current sample's flags.
*/
+ @SampleFlag
public native int getSampleFlags();
/**
@@ -346,7 +384,7 @@ final public class MediaExtractor {
* to be filled in.
* @return true iff the sample flags contain {@link #SAMPLE_FLAG_ENCRYPTED}
*/
- public native boolean getSampleCryptoInfo(MediaCodec.CryptoInfo info);
+ public native boolean getSampleCryptoInfo(@NonNull MediaCodec.CryptoInfo info);
/**
* Returns an estimate of how much data is presently cached in memory
diff --git a/media/java/android/media/MediaFocusControl.java b/media/java/android/media/MediaFocusControl.java
deleted file mode 100644
index 6518bd1..0000000
--- a/media/java/android/media/MediaFocusControl.java
+++ /dev/null
@@ -1,2197 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.app.Activity;
-import android.app.ActivityManager;
-import android.app.AppOpsManager;
-import android.app.KeyguardManager;
-import android.app.PendingIntent;
-import android.app.PendingIntent.CanceledException;
-import android.app.PendingIntent.OnFinished;
-import android.content.ActivityNotFoundException;
-import android.content.BroadcastReceiver;
-import android.content.ComponentName;
-import android.content.ContentResolver;
-import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.content.pm.PackageManager;
-import android.database.ContentObserver;
-import android.media.PlayerRecord.RemotePlaybackState;
-import android.media.audiopolicy.IAudioPolicyCallback;
-import android.net.Uri;
-import android.os.Binder;
-import android.os.Bundle;
-import android.os.Handler;
-import android.os.IBinder;
-import android.os.Looper;
-import android.os.Message;
-import android.os.PowerManager;
-import android.os.RemoteException;
-import android.os.UserHandle;
-import android.os.IBinder.DeathRecipient;
-import android.provider.Settings;
-import android.speech.RecognizerIntent;
-import android.telephony.PhoneStateListener;
-import android.telephony.TelephonyManager;
-import android.util.Log;
-import android.util.Slog;
-import android.view.KeyEvent;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Stack;
-
-/**
- * @hide
- *
- */
-public class MediaFocusControl implements OnFinished {
-
- private static final String TAG = "MediaFocusControl";
-
- /** Debug remote control client/display feature */
- protected static final boolean DEBUG_RC = false;
- /** Debug volumes */
- protected static final boolean DEBUG_VOL = false;
-
- /** Used to alter media button redirection when the phone is ringing. */
- private boolean mIsRinging = false;
-
- private final PowerManager.WakeLock mMediaEventWakeLock;
- private final MediaEventHandler mEventHandler;
- private final Context mContext;
- private final ContentResolver mContentResolver;
- private final AudioService.VolumeController mVolumeController;
- private final AppOpsManager mAppOps;
- private final KeyguardManager mKeyguardManager;
- private final AudioService mAudioService;
- private final NotificationListenerObserver mNotifListenerObserver;
-
- protected MediaFocusControl(Looper looper, Context cntxt,
- AudioService.VolumeController volumeCtrl, AudioService as) {
- mEventHandler = new MediaEventHandler(looper);
- mContext = cntxt;
- mContentResolver = mContext.getContentResolver();
- mVolumeController = volumeCtrl;
- mAudioService = as;
-
- PowerManager pm = (PowerManager)mContext.getSystemService(Context.POWER_SERVICE);
- mMediaEventWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "handleMediaEvent");
- mMainRemote = new RemotePlaybackState(-1,
- AudioService.getMaxStreamVolume(AudioManager.STREAM_MUSIC),
- AudioService.getMaxStreamVolume(AudioManager.STREAM_MUSIC));
-
- // Register for phone state monitoring
- TelephonyManager tmgr = (TelephonyManager)
- mContext.getSystemService(Context.TELEPHONY_SERVICE);
- tmgr.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
-
- mAppOps = (AppOpsManager)mContext.getSystemService(Context.APP_OPS_SERVICE);
- mKeyguardManager =
- (KeyguardManager) mContext.getSystemService(Context.KEYGUARD_SERVICE);
- mNotifListenerObserver = new NotificationListenerObserver();
-
- mHasRemotePlayback = false;
- mMainRemoteIsActive = false;
-
- PlayerRecord.setMediaFocusControl(this);
-
- postReevaluateRemote();
- }
-
- protected void dump(PrintWriter pw) {
- dumpFocusStack(pw);
- dumpRCStack(pw);
- dumpRCCStack(pw);
- dumpRCDList(pw);
- }
-
- //==========================================================================================
- // Management of RemoteControlDisplay registration permissions
- //==========================================================================================
- private final static Uri ENABLED_NOTIFICATION_LISTENERS_URI =
- Settings.Secure.getUriFor(Settings.Secure.ENABLED_NOTIFICATION_LISTENERS);
-
- private class NotificationListenerObserver extends ContentObserver {
-
- NotificationListenerObserver() {
- super(mEventHandler);
- mContentResolver.registerContentObserver(Settings.Secure.getUriFor(
- Settings.Secure.ENABLED_NOTIFICATION_LISTENERS), false, this);
- }
-
- @Override
- public void onChange(boolean selfChange, Uri uri) {
- if (!ENABLED_NOTIFICATION_LISTENERS_URI.equals(uri) || selfChange) {
- return;
- }
- if (DEBUG_RC) { Log.d(TAG, "NotificationListenerObserver.onChange()"); }
- postReevaluateRemoteControlDisplays();
- }
- }
-
- private final static int RCD_REG_FAILURE = 0;
- private final static int RCD_REG_SUCCESS_PERMISSION = 1;
- private final static int RCD_REG_SUCCESS_ENABLED_NOTIF = 2;
-
- /**
- * Checks a caller's authorization to register an IRemoteControlDisplay.
- * Authorization is granted if one of the following is true:
- * <ul>
- * <li>the caller has android.Manifest.permission.MEDIA_CONTENT_CONTROL permission</li>
- * <li>the caller's listener is one of the enabled notification listeners</li>
- * </ul>
- * @return RCD_REG_FAILURE if it's not safe to proceed with the IRemoteControlDisplay
- * registration.
- */
- private int checkRcdRegistrationAuthorization(ComponentName listenerComp) {
- // MEDIA_CONTENT_CONTROL permission check
- if (PackageManager.PERMISSION_GRANTED == mContext.checkCallingOrSelfPermission(
- android.Manifest.permission.MEDIA_CONTENT_CONTROL)) {
- if (DEBUG_RC) { Log.d(TAG, "ok to register Rcd: has MEDIA_CONTENT_CONTROL permission");}
- return RCD_REG_SUCCESS_PERMISSION;
- }
-
- // ENABLED_NOTIFICATION_LISTENERS settings check
- if (listenerComp != null) {
- // this call is coming from an app, can't use its identity to read secure settings
- final long ident = Binder.clearCallingIdentity();
- try {
- final int currentUser = ActivityManager.getCurrentUser();
- final String enabledNotifListeners = Settings.Secure.getStringForUser(
- mContext.getContentResolver(),
- Settings.Secure.ENABLED_NOTIFICATION_LISTENERS,
- currentUser);
- if (enabledNotifListeners != null) {
- final String[] components = enabledNotifListeners.split(":");
- for (int i=0; i<components.length; i++) {
- final ComponentName component =
- ComponentName.unflattenFromString(components[i]);
- if (component != null) {
- if (listenerComp.equals(component)) {
- if (DEBUG_RC) { Log.d(TAG, "ok to register RCC: " + component +
- " is authorized notification listener"); }
- return RCD_REG_SUCCESS_ENABLED_NOTIF;
- }
- }
- }
- }
- if (DEBUG_RC) { Log.d(TAG, "not ok to register RCD, " + listenerComp +
- " is not in list of ENABLED_NOTIFICATION_LISTENERS"); }
- } finally {
- Binder.restoreCallingIdentity(ident);
- }
- }
-
- return RCD_REG_FAILURE;
- }
-
- protected boolean registerRemoteController(IRemoteControlDisplay rcd, int w, int h,
- ComponentName listenerComp) {
- int reg = checkRcdRegistrationAuthorization(listenerComp);
- if (reg != RCD_REG_FAILURE) {
- registerRemoteControlDisplay_int(rcd, w, h, listenerComp);
- return true;
- } else {
- Slog.w(TAG, "Access denied to process: " + Binder.getCallingPid() +
- ", must have permission " + android.Manifest.permission.MEDIA_CONTENT_CONTROL +
- " or be an enabled NotificationListenerService for registerRemoteController");
- return false;
- }
- }
-
- protected boolean registerRemoteControlDisplay(IRemoteControlDisplay rcd, int w, int h) {
- int reg = checkRcdRegistrationAuthorization(null);
- if (reg != RCD_REG_FAILURE) {
- registerRemoteControlDisplay_int(rcd, w, h, null);
- return true;
- } else {
- Slog.w(TAG, "Access denied to process: " + Binder.getCallingPid() +
- ", must have permission " + android.Manifest.permission.MEDIA_CONTENT_CONTROL +
- " to register IRemoteControlDisplay");
- return false;
- }
- }
-
- private void postReevaluateRemoteControlDisplays() {
- sendMsg(mEventHandler, MSG_REEVALUATE_RCD, SENDMSG_QUEUE, 0, 0, null, 0);
- }
-
- private void onReevaluateRemoteControlDisplays() {
- if (DEBUG_RC) { Log.d(TAG, "onReevaluateRemoteControlDisplays()"); }
- // read which components are enabled notification listeners
- final int currentUser = ActivityManager.getCurrentUser();
- final String enabledNotifListeners = Settings.Secure.getStringForUser(
- mContext.getContentResolver(),
- Settings.Secure.ENABLED_NOTIFICATION_LISTENERS,
- currentUser);
- if (DEBUG_RC) { Log.d(TAG, " > enabled list: " + enabledNotifListeners); }
- synchronized(mAudioFocusLock) {
- synchronized(mPRStack) {
- // check whether the "enable" status of each RCD with a notification listener
- // has changed
- final String[] enabledComponents;
- if (enabledNotifListeners == null) {
- enabledComponents = null;
- } else {
- enabledComponents = enabledNotifListeners.split(":");
- }
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di =
- displayIterator.next();
- if (di.mClientNotifListComp != null) {
- boolean wasEnabled = di.mEnabled;
- di.mEnabled = isComponentInStringArray(di.mClientNotifListComp,
- enabledComponents);
- if (wasEnabled != di.mEnabled){
- try {
- // tell the RCD whether it's enabled
- di.mRcDisplay.setEnabled(di.mEnabled);
- // tell the RCCs about the change for this RCD
- enableRemoteControlDisplayForClient_syncRcStack(
- di.mRcDisplay, di.mEnabled);
- // when enabling, refresh the information on the display
- if (di.mEnabled) {
- sendMsg(mEventHandler, MSG_RCDISPLAY_INIT_INFO, SENDMSG_QUEUE,
- di.mArtworkExpectedWidth /*arg1*/,
- di.mArtworkExpectedHeight/*arg2*/,
- di.mRcDisplay /*obj*/, 0/*delay*/);
- }
- } catch (RemoteException e) {
- Log.e(TAG, "Error en/disabling RCD: ", e);
- }
- }
- }
- }
- }
- }
- }
-
- /**
- * @param comp a non-null ComponentName
- * @param enabledArray may be null
- * @return
- */
- private boolean isComponentInStringArray(ComponentName comp, String[] enabledArray) {
- if (enabledArray == null || enabledArray.length == 0) {
- if (DEBUG_RC) { Log.d(TAG, " > " + comp + " is NOT enabled"); }
- return false;
- }
- final String compString = comp.flattenToString();
- for (int i=0; i<enabledArray.length; i++) {
- if (compString.equals(enabledArray[i])) {
- if (DEBUG_RC) { Log.d(TAG, " > " + compString + " is enabled"); }
- return true;
- }
- }
- if (DEBUG_RC) { Log.d(TAG, " > " + compString + " is NOT enabled"); }
- return false;
- }
-
- //==========================================================================================
- // Internal event handling
- //==========================================================================================
-
- // event handler messages
- private static final int MSG_RCDISPLAY_CLEAR = 1;
- private static final int MSG_RCDISPLAY_UPDATE = 2;
- private static final int MSG_REEVALUATE_REMOTE = 3;
- private static final int MSG_RCC_NEW_PLAYBACK_INFO = 4;
- private static final int MSG_RCC_NEW_VOLUME_OBS = 5;
- private static final int MSG_RCC_NEW_PLAYBACK_STATE = 6;
- private static final int MSG_RCC_SEEK_REQUEST = 7;
- private static final int MSG_RCC_UPDATE_METADATA = 8;
- private static final int MSG_RCDISPLAY_INIT_INFO = 9;
- private static final int MSG_REEVALUATE_RCD = 10;
- private static final int MSG_UNREGISTER_MEDIABUTTONINTENT = 11;
-
- // sendMsg() flags
- /** If the msg is already queued, replace it with this one. */
- private static final int SENDMSG_REPLACE = 0;
- /** If the msg is already queued, ignore this one and leave the old. */
- private static final int SENDMSG_NOOP = 1;
- /** If the msg is already queued, queue this one and leave the old. */
- private static final int SENDMSG_QUEUE = 2;
-
- private static void sendMsg(Handler handler, int msg,
- int existingMsgPolicy, int arg1, int arg2, Object obj, int delay) {
-
- if (existingMsgPolicy == SENDMSG_REPLACE) {
- handler.removeMessages(msg);
- } else if (existingMsgPolicy == SENDMSG_NOOP && handler.hasMessages(msg)) {
- return;
- }
-
- handler.sendMessageDelayed(handler.obtainMessage(msg, arg1, arg2, obj), delay);
- }
-
- private class MediaEventHandler extends Handler {
- MediaEventHandler(Looper looper) {
- super(looper);
- }
-
- @Override
- public void handleMessage(Message msg) {
- switch(msg.what) {
- case MSG_RCDISPLAY_CLEAR:
- onRcDisplayClear();
- break;
-
- case MSG_RCDISPLAY_UPDATE:
- // msg.obj is guaranteed to be non null
- onRcDisplayUpdate( (PlayerRecord) msg.obj, msg.arg1);
- break;
-
- case MSG_REEVALUATE_REMOTE:
- onReevaluateRemote();
- break;
-
- case MSG_RCC_NEW_VOLUME_OBS:
- onRegisterVolumeObserverForRcc(msg.arg1 /* rccId */,
- (IRemoteVolumeObserver)msg.obj /* rvo */);
- break;
-
- case MSG_RCDISPLAY_INIT_INFO:
- // msg.obj is guaranteed to be non null
- onRcDisplayInitInfo((IRemoteControlDisplay)msg.obj /*newRcd*/,
- msg.arg1/*w*/, msg.arg2/*h*/);
- break;
-
- case MSG_REEVALUATE_RCD:
- onReevaluateRemoteControlDisplays();
- break;
-
- case MSG_UNREGISTER_MEDIABUTTONINTENT:
- unregisterMediaButtonIntent( (PendingIntent) msg.obj );
- break;
- }
- }
- }
-
-
- //==========================================================================================
- // AudioFocus
- //==========================================================================================
-
- /**
- * Constant to identify a focus stack entry that is used to hold the focus while the phone
- * is ringing or during a call. Used by com.android.internal.telephony.CallManager when
- * entering and exiting calls.
- */
- protected final static String IN_VOICE_COMM_FOCUS_ID = "AudioFocus_For_Phone_Ring_And_Calls";
-
- private final static Object mAudioFocusLock = new Object();
-
- private final static Object mRingingLock = new Object();
-
- private PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
- @Override
- public void onCallStateChanged(int state, String incomingNumber) {
- if (state == TelephonyManager.CALL_STATE_RINGING) {
- //Log.v(TAG, " CALL_STATE_RINGING");
- synchronized(mRingingLock) {
- mIsRinging = true;
- }
- } else if ((state == TelephonyManager.CALL_STATE_OFFHOOK)
- || (state == TelephonyManager.CALL_STATE_IDLE)) {
- synchronized(mRingingLock) {
- mIsRinging = false;
- }
- }
- }
- };
-
- /**
- * Discard the current audio focus owner.
- * Notify top of audio focus stack that it lost focus (regardless of possibility to reassign
- * focus), remove it from the stack, and clear the remote control display.
- */
- protected void discardAudioFocusOwner() {
- synchronized(mAudioFocusLock) {
- if (!mFocusStack.empty()) {
- // notify the current focus owner it lost focus after removing it from stack
- final FocusRequester exFocusOwner = mFocusStack.pop();
- exFocusOwner.handleFocusLoss(AudioManager.AUDIOFOCUS_LOSS);
- exFocusOwner.release();
- }
- }
- }
-
- /**
- * Called synchronized on mAudioFocusLock
- */
- private void notifyTopOfAudioFocusStack() {
- // notify the top of the stack it gained focus
- if (!mFocusStack.empty()) {
- if (canReassignAudioFocus()) {
- mFocusStack.peek().handleFocusGain(AudioManager.AUDIOFOCUS_GAIN);
- }
- }
- }
-
- /**
- * Focus is requested, propagate the associated loss throughout the stack.
- * @param focusGain the new focus gain that will later be added at the top of the stack
- */
- private void propagateFocusLossFromGain_syncAf(int focusGain) {
- // going through the audio focus stack to signal new focus, traversing order doesn't
- // matter as all entries respond to the same external focus gain
- Iterator<FocusRequester> stackIterator = mFocusStack.iterator();
- while(stackIterator.hasNext()) {
- stackIterator.next().handleExternalFocusGain(focusGain);
- }
- }
-
- private final Stack<FocusRequester> mFocusStack = new Stack<FocusRequester>();
-
- /**
- * Helper function:
- * Display in the log the current entries in the audio focus stack
- */
- private void dumpFocusStack(PrintWriter pw) {
- pw.println("\nAudio Focus stack entries (last is top of stack):");
- synchronized(mAudioFocusLock) {
- Iterator<FocusRequester> stackIterator = mFocusStack.iterator();
- while(stackIterator.hasNext()) {
- stackIterator.next().dump(pw);
- }
- }
- pw.println("\n Notify on duck: " + mNotifyFocusOwnerOnDuck +"\n");
- }
-
- /**
- * Helper function:
- * Called synchronized on mAudioFocusLock
- * Remove a focus listener from the focus stack.
- * @param clientToRemove the focus listener
- * @param signal if true and the listener was at the top of the focus stack, i.e. it was holding
- * focus, notify the next item in the stack it gained focus.
- */
- private void removeFocusStackEntry(String clientToRemove, boolean signal,
- boolean notifyFocusFollowers) {
- // is the current top of the focus stack abandoning focus? (because of request, not death)
- if (!mFocusStack.empty() && mFocusStack.peek().hasSameClient(clientToRemove))
- {
- //Log.i(TAG, " removeFocusStackEntry() removing top of stack");
- FocusRequester fr = mFocusStack.pop();
- fr.release();
- if (notifyFocusFollowers) {
- final AudioFocusInfo afi = fr.toAudioFocusInfo();
- afi.clearLossReceived();
- notifyExtPolicyFocusLoss_syncAf(afi, false);
- }
- if (signal) {
- // notify the new top of the stack it gained focus
- notifyTopOfAudioFocusStack();
- }
- } else {
- // focus is abandoned by a client that's not at the top of the stack,
- // no need to update focus.
- // (using an iterator on the stack so we can safely remove an entry after having
- // evaluated it, traversal order doesn't matter here)
- Iterator<FocusRequester> stackIterator = mFocusStack.iterator();
- while(stackIterator.hasNext()) {
- FocusRequester fr = stackIterator.next();
- if(fr.hasSameClient(clientToRemove)) {
- Log.i(TAG, "AudioFocus removeFocusStackEntry(): removing entry for "
- + clientToRemove);
- stackIterator.remove();
- fr.release();
- }
- }
- }
- }
-
- /**
- * Helper function:
- * Called synchronized on mAudioFocusLock
- * Remove focus listeners from the focus stack for a particular client when it has died.
- */
- private void removeFocusStackEntryForClient(IBinder cb) {
- // is the owner of the audio focus part of the client to remove?
- boolean isTopOfStackForClientToRemove = !mFocusStack.isEmpty() &&
- mFocusStack.peek().hasSameBinder(cb);
- // (using an iterator on the stack so we can safely remove an entry after having
- // evaluated it, traversal order doesn't matter here)
- Iterator<FocusRequester> stackIterator = mFocusStack.iterator();
- while(stackIterator.hasNext()) {
- FocusRequester fr = stackIterator.next();
- if(fr.hasSameBinder(cb)) {
- Log.i(TAG, "AudioFocus removeFocusStackEntry(): removing entry for " + cb);
- stackIterator.remove();
- // the client just died, no need to unlink to its death
- }
- }
- if (isTopOfStackForClientToRemove) {
- // we removed an entry at the top of the stack:
- // notify the new top of the stack it gained focus.
- notifyTopOfAudioFocusStack();
- }
- }
-
- /**
- * Helper function:
- * Returns true if the system is in a state where the focus can be reevaluated, false otherwise.
- * The implementation guarantees that a state where focus cannot be immediately reassigned
- * implies that an "locked" focus owner is at the top of the focus stack.
- * Modifications to the implementation that break this assumption will cause focus requests to
- * misbehave when honoring the AudioManager.AUDIOFOCUS_FLAG_DELAY_OK flag.
- */
- private boolean canReassignAudioFocus() {
- // focus requests are rejected during a phone call or when the phone is ringing
- // this is equivalent to IN_VOICE_COMM_FOCUS_ID having the focus
- if (!mFocusStack.isEmpty() && isLockedFocusOwner(mFocusStack.peek())) {
- return false;
- }
- return true;
- }
-
- private boolean isLockedFocusOwner(FocusRequester fr) {
- return (fr.hasSameClient(IN_VOICE_COMM_FOCUS_ID) || fr.isLockedFocusOwner());
- }
-
- /**
- * Helper function
- * Pre-conditions: focus stack is not empty, there is one or more locked focus owner
- * at the top of the focus stack
- * Push the focus requester onto the audio focus stack at the first position immediately
- * following the locked focus owners.
- * @return {@link AudioManager#AUDIOFOCUS_REQUEST_GRANTED} or
- * {@link AudioManager#AUDIOFOCUS_REQUEST_DELAYED}
- */
- private int pushBelowLockedFocusOwners(FocusRequester nfr) {
- int lastLockedFocusOwnerIndex = mFocusStack.size();
- for (int index = mFocusStack.size()-1; index >= 0; index--) {
- if (isLockedFocusOwner(mFocusStack.elementAt(index))) {
- lastLockedFocusOwnerIndex = index;
- }
- }
- if (lastLockedFocusOwnerIndex == mFocusStack.size()) {
- // this should not happen, but handle it and log an error
- Log.e(TAG, "No exclusive focus owner found in propagateFocusLossFromGain_syncAf()",
- new Exception());
- // no exclusive owner, push at top of stack, focus is granted, propagate change
- propagateFocusLossFromGain_syncAf(nfr.getGainRequest());
- mFocusStack.push(nfr);
- return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
- } else {
- mFocusStack.insertElementAt(nfr, lastLockedFocusOwnerIndex);
- return AudioManager.AUDIOFOCUS_REQUEST_DELAYED;
- }
- }
-
- /**
- * Inner class to monitor audio focus client deaths, and remove them from the audio focus
- * stack if necessary.
- */
- protected class AudioFocusDeathHandler implements IBinder.DeathRecipient {
- private IBinder mCb; // To be notified of client's death
-
- AudioFocusDeathHandler(IBinder cb) {
- mCb = cb;
- }
-
- public void binderDied() {
- synchronized(mAudioFocusLock) {
- Log.w(TAG, " AudioFocus audio focus client died");
- removeFocusStackEntryForClient(mCb);
- }
- }
-
- public IBinder getBinder() {
- return mCb;
- }
- }
-
- /**
- * Indicates whether to notify an audio focus owner when it loses focus
- * with {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK} if it will only duck.
- * This variable being false indicates an AudioPolicy has been registered and has signaled
- * it will handle audio ducking.
- */
- private boolean mNotifyFocusOwnerOnDuck = true;
-
- protected void setDuckingInExtPolicyAvailable(boolean available) {
- mNotifyFocusOwnerOnDuck = !available;
- }
-
- boolean mustNotifyFocusOwnerOnDuck() { return mNotifyFocusOwnerOnDuck; }
-
- private ArrayList<IAudioPolicyCallback> mFocusFollowers = new ArrayList<IAudioPolicyCallback>();
-
- void addFocusFollower(IAudioPolicyCallback ff) {
- if (ff == null) {
- return;
- }
- synchronized(mAudioFocusLock) {
- boolean found = false;
- for (IAudioPolicyCallback pcb : mFocusFollowers) {
- if (pcb.asBinder().equals(ff.asBinder())) {
- found = true;
- break;
- }
- }
- if (found) {
- return;
- } else {
- mFocusFollowers.add(ff);
- }
- }
- }
-
- void removeFocusFollower(IAudioPolicyCallback ff) {
- if (ff == null) {
- return;
- }
- synchronized(mAudioFocusLock) {
- for (IAudioPolicyCallback pcb : mFocusFollowers) {
- if (pcb.asBinder().equals(ff.asBinder())) {
- mFocusFollowers.remove(pcb);
- break;
- }
- }
- }
- }
-
- /**
- * Called synchronized on mAudioFocusLock
- */
- void notifyExtPolicyFocusGrant_syncAf(AudioFocusInfo afi, int requestResult) {
- for (IAudioPolicyCallback pcb : mFocusFollowers) {
- try {
- // oneway
- pcb.notifyAudioFocusGrant(afi, requestResult);
- } catch (RemoteException e) {
- Log.e(TAG, "Can't call newAudioFocusLoser() on IAudioPolicyCallback "
- + pcb.asBinder(), e);
- }
- }
- }
-
- /**
- * Called synchronized on mAudioFocusLock
- */
- void notifyExtPolicyFocusLoss_syncAf(AudioFocusInfo afi, boolean wasDispatched) {
- for (IAudioPolicyCallback pcb : mFocusFollowers) {
- try {
- // oneway
- pcb.notifyAudioFocusLoss(afi, wasDispatched);
- } catch (RemoteException e) {
- Log.e(TAG, "Can't call newAudioFocusLoser() on IAudioPolicyCallback "
- + pcb.asBinder(), e);
- }
- }
- }
-
- protected int getCurrentAudioFocus() {
- synchronized(mAudioFocusLock) {
- if (mFocusStack.empty()) {
- return AudioManager.AUDIOFOCUS_NONE;
- } else {
- return mFocusStack.peek().getGainRequest();
- }
- }
- }
-
- /** @see AudioManager#requestAudioFocus(AudioManager.OnAudioFocusChangeListener, int, int, int) */
- protected int requestAudioFocus(AudioAttributes aa, int focusChangeHint, IBinder cb,
- IAudioFocusDispatcher fd, String clientId, String callingPackageName, int flags) {
- Log.i(TAG, " AudioFocus requestAudioFocus() from " + clientId + " req=" + focusChangeHint +
- "flags=0x" + Integer.toHexString(flags));
- // we need a valid binder callback for clients
- if (!cb.pingBinder()) {
- Log.e(TAG, " AudioFocus DOA client for requestAudioFocus(), aborting.");
- return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
- }
-
- if (mAppOps.noteOp(AppOpsManager.OP_TAKE_AUDIO_FOCUS, Binder.getCallingUid(),
- callingPackageName) != AppOpsManager.MODE_ALLOWED) {
- return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
- }
-
- synchronized(mAudioFocusLock) {
- boolean focusGrantDelayed = false;
- if (!canReassignAudioFocus()) {
- if ((flags & AudioManager.AUDIOFOCUS_FLAG_DELAY_OK) == 0) {
- return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
- } else {
- // request has AUDIOFOCUS_FLAG_DELAY_OK: focus can't be
- // granted right now, so the requester will be inserted in the focus stack
- // to receive focus later
- focusGrantDelayed = true;
- }
- }
-
- // handle the potential premature death of the new holder of the focus
- // (premature death == death before abandoning focus)
- // Register for client death notification
- AudioFocusDeathHandler afdh = new AudioFocusDeathHandler(cb);
- try {
- cb.linkToDeath(afdh, 0);
- } catch (RemoteException e) {
- // client has already died!
- Log.w(TAG, "AudioFocus requestAudioFocus() could not link to "+cb+" binder death");
- return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
- }
-
- if (!mFocusStack.empty() && mFocusStack.peek().hasSameClient(clientId)) {
- // if focus is already owned by this client and the reason for acquiring the focus
- // hasn't changed, don't do anything
- final FocusRequester fr = mFocusStack.peek();
- if (fr.getGainRequest() == focusChangeHint && fr.getGrantFlags() == flags) {
- // unlink death handler so it can be gc'ed.
- // linkToDeath() creates a JNI global reference preventing collection.
- cb.unlinkToDeath(afdh, 0);
- notifyExtPolicyFocusGrant_syncAf(fr.toAudioFocusInfo(),
- AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
- return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
- }
- // the reason for the audio focus request has changed: remove the current top of
- // stack and respond as if we had a new focus owner
- if (!focusGrantDelayed) {
- mFocusStack.pop();
- // the entry that was "popped" is the same that was "peeked" above
- fr.release();
- }
- }
-
- // focus requester might already be somewhere below in the stack, remove it
- removeFocusStackEntry(clientId, false /* signal */, false /*notifyFocusFollowers*/);
-
- final FocusRequester nfr = new FocusRequester(aa, focusChangeHint, flags, fd, cb,
- clientId, afdh, callingPackageName, Binder.getCallingUid(), this);
- if (focusGrantDelayed) {
- // focusGrantDelayed being true implies we can't reassign focus right now
- // which implies the focus stack is not empty.
- final int requestResult = pushBelowLockedFocusOwners(nfr);
- if (requestResult != AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
- notifyExtPolicyFocusGrant_syncAf(nfr.toAudioFocusInfo(), requestResult);
- }
- return requestResult;
- } else {
- // propagate the focus change through the stack
- if (!mFocusStack.empty()) {
- propagateFocusLossFromGain_syncAf(focusChangeHint);
- }
-
- // push focus requester at the top of the audio focus stack
- mFocusStack.push(nfr);
- }
- notifyExtPolicyFocusGrant_syncAf(nfr.toAudioFocusInfo(),
- AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
-
- }//synchronized(mAudioFocusLock)
-
- return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
- }
-
- /**
- * @see AudioManager#abandonAudioFocus(AudioManager.OnAudioFocusChangeListener, AudioAttributes)
- * */
- protected int abandonAudioFocus(IAudioFocusDispatcher fl, String clientId, AudioAttributes aa) {
- // AudioAttributes are currently ignored, to be used for zones
- Log.i(TAG, " AudioFocus abandonAudioFocus() from " + clientId);
- try {
- // this will take care of notifying the new focus owner if needed
- synchronized(mAudioFocusLock) {
- removeFocusStackEntry(clientId, true /*signal*/, true /*notifyFocusFollowers*/);
- }
- } catch (java.util.ConcurrentModificationException cme) {
- // Catching this exception here is temporary. It is here just to prevent
- // a crash seen when the "Silent" notification is played. This is believed to be fixed
- // but this try catch block is left just to be safe.
- Log.e(TAG, "FATAL EXCEPTION AudioFocus abandonAudioFocus() caused " + cme);
- cme.printStackTrace();
- }
-
- return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
- }
-
-
- protected void unregisterAudioFocusClient(String clientId) {
- synchronized(mAudioFocusLock) {
- removeFocusStackEntry(clientId, false, true /*notifyFocusFollowers*/);
- }
- }
-
-
- //==========================================================================================
- // RemoteControl
- //==========================================================================================
- /**
- * No-op if the key code for keyEvent is not a valid media key
- * (see {@link #isValidMediaKeyEvent(KeyEvent)})
- * @param keyEvent the key event to send
- */
- protected void dispatchMediaKeyEvent(KeyEvent keyEvent) {
- filterMediaKeyEvent(keyEvent, false /*needWakeLock*/);
- }
-
- /**
- * No-op if the key code for keyEvent is not a valid media key
- * (see {@link #isValidMediaKeyEvent(KeyEvent)})
- * @param keyEvent the key event to send
- */
- protected void dispatchMediaKeyEventUnderWakelock(KeyEvent keyEvent) {
- filterMediaKeyEvent(keyEvent, true /*needWakeLock*/);
- }
-
- private void filterMediaKeyEvent(KeyEvent keyEvent, boolean needWakeLock) {
- // sanity check on the incoming key event
- if (!isValidMediaKeyEvent(keyEvent)) {
- Log.e(TAG, "not dispatching invalid media key event " + keyEvent);
- return;
- }
- // event filtering for telephony
- synchronized(mRingingLock) {
- synchronized(mPRStack) {
- if ((mMediaReceiverForCalls != null) &&
- (mIsRinging || (mAudioService.getMode() == AudioSystem.MODE_IN_CALL))) {
- dispatchMediaKeyEventForCalls(keyEvent, needWakeLock);
- return;
- }
- }
- }
- // event filtering based on voice-based interactions
- if (isValidVoiceInputKeyCode(keyEvent.getKeyCode())) {
- filterVoiceInputKeyEvent(keyEvent, needWakeLock);
- } else {
- dispatchMediaKeyEvent(keyEvent, needWakeLock);
- }
- }
-
- /**
- * Handles the dispatching of the media button events to the telephony package.
- * Precondition: mMediaReceiverForCalls != null
- * @param keyEvent a non-null KeyEvent whose key code is one of the supported media buttons
- * @param needWakeLock true if a PARTIAL_WAKE_LOCK needs to be held while this key event
- * is dispatched.
- */
- private void dispatchMediaKeyEventForCalls(KeyEvent keyEvent, boolean needWakeLock) {
- Intent keyIntent = new Intent(Intent.ACTION_MEDIA_BUTTON, null);
- keyIntent.putExtra(Intent.EXTRA_KEY_EVENT, keyEvent);
- keyIntent.setPackage(mMediaReceiverForCalls.getPackageName());
- if (needWakeLock) {
- mMediaEventWakeLock.acquire();
- keyIntent.putExtra(EXTRA_WAKELOCK_ACQUIRED, WAKELOCK_RELEASE_ON_FINISHED);
- }
- final long ident = Binder.clearCallingIdentity();
- try {
- mContext.sendOrderedBroadcastAsUser(keyIntent, UserHandle.ALL,
- null, mKeyEventDone, mEventHandler, Activity.RESULT_OK, null, null);
- } finally {
- Binder.restoreCallingIdentity(ident);
- }
- }
-
- /**
- * Handles the dispatching of the media button events to one of the registered listeners,
- * or if there was none, broadcast an ACTION_MEDIA_BUTTON intent to the rest of the system.
- * @param keyEvent a non-null KeyEvent whose key code is one of the supported media buttons
- * @param needWakeLock true if a PARTIAL_WAKE_LOCK needs to be held while this key event
- * is dispatched.
- */
- private void dispatchMediaKeyEvent(KeyEvent keyEvent, boolean needWakeLock) {
- if (needWakeLock) {
- mMediaEventWakeLock.acquire();
- }
- Intent keyIntent = new Intent(Intent.ACTION_MEDIA_BUTTON, null);
- keyIntent.putExtra(Intent.EXTRA_KEY_EVENT, keyEvent);
- synchronized(mPRStack) {
- if (!mPRStack.empty()) {
- // send the intent that was registered by the client
- try {
- mPRStack.peek().getMediaButtonIntent().send(mContext,
- needWakeLock ? WAKELOCK_RELEASE_ON_FINISHED : 0 /*code*/,
- keyIntent, this, mEventHandler);
- } catch (CanceledException e) {
- Log.e(TAG, "Error sending pending intent " + mPRStack.peek());
- e.printStackTrace();
- }
- } else {
- // legacy behavior when nobody registered their media button event receiver
- // through AudioManager
- if (needWakeLock) {
- keyIntent.putExtra(EXTRA_WAKELOCK_ACQUIRED, WAKELOCK_RELEASE_ON_FINISHED);
- }
- final long ident = Binder.clearCallingIdentity();
- try {
- mContext.sendOrderedBroadcastAsUser(keyIntent, UserHandle.ALL,
- null, mKeyEventDone,
- mEventHandler, Activity.RESULT_OK, null, null);
- } finally {
- Binder.restoreCallingIdentity(ident);
- }
- }
- }
- }
-
- /**
- * The different actions performed in response to a voice button key event.
- */
- private final static int VOICEBUTTON_ACTION_DISCARD_CURRENT_KEY_PRESS = 1;
- private final static int VOICEBUTTON_ACTION_START_VOICE_INPUT = 2;
- private final static int VOICEBUTTON_ACTION_SIMULATE_KEY_PRESS = 3;
-
- private final Object mVoiceEventLock = new Object();
- private boolean mVoiceButtonDown;
- private boolean mVoiceButtonHandled;
-
- /**
- * Filter key events that may be used for voice-based interactions
- * @param keyEvent a non-null KeyEvent whose key code is that of one of the supported
- * media buttons that can be used to trigger voice-based interactions.
- * @param needWakeLock true if a PARTIAL_WAKE_LOCK needs to be held while this key event
- * is dispatched.
- */
- private void filterVoiceInputKeyEvent(KeyEvent keyEvent, boolean needWakeLock) {
- if (DEBUG_RC) {
- Log.v(TAG, "voice input key event: " + keyEvent + ", needWakeLock=" + needWakeLock);
- }
-
- int voiceButtonAction = VOICEBUTTON_ACTION_DISCARD_CURRENT_KEY_PRESS;
- int keyAction = keyEvent.getAction();
- synchronized (mVoiceEventLock) {
- if (keyAction == KeyEvent.ACTION_DOWN) {
- if (keyEvent.getRepeatCount() == 0) {
- // initial down
- mVoiceButtonDown = true;
- mVoiceButtonHandled = false;
- } else if (mVoiceButtonDown && !mVoiceButtonHandled
- && (keyEvent.getFlags() & KeyEvent.FLAG_LONG_PRESS) != 0) {
- // long-press, start voice-based interactions
- mVoiceButtonHandled = true;
- voiceButtonAction = VOICEBUTTON_ACTION_START_VOICE_INPUT;
- }
- } else if (keyAction == KeyEvent.ACTION_UP) {
- if (mVoiceButtonDown) {
- // voice button up
- mVoiceButtonDown = false;
- if (!mVoiceButtonHandled && !keyEvent.isCanceled()) {
- voiceButtonAction = VOICEBUTTON_ACTION_SIMULATE_KEY_PRESS;
- }
- }
- }
- }//synchronized (mVoiceEventLock)
-
- // take action after media button event filtering for voice-based interactions
- switch (voiceButtonAction) {
- case VOICEBUTTON_ACTION_DISCARD_CURRENT_KEY_PRESS:
- if (DEBUG_RC) Log.v(TAG, " ignore key event");
- break;
- case VOICEBUTTON_ACTION_START_VOICE_INPUT:
- if (DEBUG_RC) Log.v(TAG, " start voice-based interactions");
- // then start the voice-based interactions
- startVoiceBasedInteractions(needWakeLock);
- break;
- case VOICEBUTTON_ACTION_SIMULATE_KEY_PRESS:
- if (DEBUG_RC) Log.v(TAG, " send simulated key event, wakelock=" + needWakeLock);
- sendSimulatedMediaButtonEvent(keyEvent, needWakeLock);
- break;
- }
- }
-
- private void sendSimulatedMediaButtonEvent(KeyEvent originalKeyEvent, boolean needWakeLock) {
- // send DOWN event
- KeyEvent keyEvent = KeyEvent.changeAction(originalKeyEvent, KeyEvent.ACTION_DOWN);
- dispatchMediaKeyEvent(keyEvent, needWakeLock);
- // send UP event
- keyEvent = KeyEvent.changeAction(originalKeyEvent, KeyEvent.ACTION_UP);
- dispatchMediaKeyEvent(keyEvent, needWakeLock);
-
- }
-
- private static boolean isValidMediaKeyEvent(KeyEvent keyEvent) {
- if (keyEvent == null) {
- return false;
- }
- return KeyEvent.isMediaKey(keyEvent.getKeyCode());
- }
-
- /**
- * Checks whether the given key code is one that can trigger the launch of voice-based
- * interactions.
- * @param keyCode the key code associated with the key event
- * @return true if the key is one of the supported voice-based interaction triggers
- */
- private static boolean isValidVoiceInputKeyCode(int keyCode) {
- if (keyCode == KeyEvent.KEYCODE_HEADSETHOOK) {
- return true;
- } else {
- return false;
- }
- }
-
- /**
- * Tell the system to start voice-based interactions / voice commands
- */
- private void startVoiceBasedInteractions(boolean needWakeLock) {
- Intent voiceIntent = null;
- // select which type of search to launch:
- // - screen on and device unlocked: action is ACTION_WEB_SEARCH
- // - device locked or screen off: action is ACTION_VOICE_SEARCH_HANDS_FREE
- // with EXTRA_SECURE set to true if the device is securely locked
- PowerManager pm = (PowerManager)mContext.getSystemService(Context.POWER_SERVICE);
- boolean isLocked = mKeyguardManager != null && mKeyguardManager.isKeyguardLocked();
- if (!isLocked && pm.isScreenOn()) {
- voiceIntent = new Intent(android.speech.RecognizerIntent.ACTION_WEB_SEARCH);
- Log.i(TAG, "voice-based interactions: about to use ACTION_WEB_SEARCH");
- } else {
- voiceIntent = new Intent(RecognizerIntent.ACTION_VOICE_SEARCH_HANDS_FREE);
- voiceIntent.putExtra(RecognizerIntent.EXTRA_SECURE,
- isLocked && mKeyguardManager.isKeyguardSecure());
- Log.i(TAG, "voice-based interactions: about to use ACTION_VOICE_SEARCH_HANDS_FREE");
- }
- // start the search activity
- if (needWakeLock) {
- mMediaEventWakeLock.acquire();
- }
- final long identity = Binder.clearCallingIdentity();
- try {
- if (voiceIntent != null) {
- voiceIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK
- | Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
- mContext.startActivityAsUser(voiceIntent, UserHandle.CURRENT);
- }
- } catch (ActivityNotFoundException e) {
- Log.w(TAG, "No activity for search: " + e);
- } finally {
- Binder.restoreCallingIdentity(identity);
- if (needWakeLock) {
- mMediaEventWakeLock.release();
- }
- }
- }
-
- private static final int WAKELOCK_RELEASE_ON_FINISHED = 1980; //magic number
-
- // only set when wakelock was acquired, no need to check value when received
- private static final String EXTRA_WAKELOCK_ACQUIRED =
- "android.media.AudioService.WAKELOCK_ACQUIRED";
-
- public void onSendFinished(PendingIntent pendingIntent, Intent intent,
- int resultCode, String resultData, Bundle resultExtras) {
- if (resultCode == WAKELOCK_RELEASE_ON_FINISHED) {
- mMediaEventWakeLock.release();
- }
- }
-
- BroadcastReceiver mKeyEventDone = new BroadcastReceiver() {
- public void onReceive(Context context, Intent intent) {
- if (intent == null) {
- return;
- }
- Bundle extras = intent.getExtras();
- if (extras == null) {
- return;
- }
- if (extras.containsKey(EXTRA_WAKELOCK_ACQUIRED)) {
- mMediaEventWakeLock.release();
- }
- }
- };
-
- /**
- * Synchronization on mCurrentRcLock always inside a block synchronized on mPRStack
- */
- private final Object mCurrentRcLock = new Object();
- /**
- * The one remote control client which will receive a request for display information.
- * This object may be null.
- * Access protected by mCurrentRcLock.
- */
- private IRemoteControlClient mCurrentRcClient = null;
- /**
- * The PendingIntent associated with mCurrentRcClient. Its value is irrelevant
- * if mCurrentRcClient is null
- */
- private PendingIntent mCurrentRcClientIntent = null;
-
- private final static int RC_INFO_NONE = 0;
- private final static int RC_INFO_ALL =
- RemoteControlClient.FLAG_INFORMATION_REQUEST_ALBUM_ART |
- RemoteControlClient.FLAG_INFORMATION_REQUEST_KEY_MEDIA |
- RemoteControlClient.FLAG_INFORMATION_REQUEST_METADATA |
- RemoteControlClient.FLAG_INFORMATION_REQUEST_PLAYSTATE;
-
- /**
- * A monotonically increasing generation counter for mCurrentRcClient.
- * Only accessed with a lock on mCurrentRcLock.
- * No value wrap-around issues as we only act on equal values.
- */
- private int mCurrentRcClientGen = 0;
-
-
- /**
- * Internal cache for the playback information of the RemoteControlClient whose volume gets to
- * be controlled by the volume keys ("main"), so we don't have to iterate over the RC stack
- * every time we need this info.
- */
- private RemotePlaybackState mMainRemote;
- /**
- * Indicates whether the "main" RemoteControlClient is considered active.
- * Use synchronized on mMainRemote.
- */
- private boolean mMainRemoteIsActive;
- /**
- * Indicates whether there is remote playback going on. True even if there is no "active"
- * remote playback (mMainRemoteIsActive is false), but a RemoteControlClient has declared it
- * handles remote playback.
- * Use synchronized on mMainRemote.
- */
- private boolean mHasRemotePlayback;
-
- /**
- * The stack of remote control event receivers.
- * All read and write operations on mPRStack are synchronized.
- */
- private final Stack<PlayerRecord> mPRStack = new Stack<PlayerRecord>();
-
- /**
- * The component the telephony package can register so telephony calls have priority to
- * handle media button events
- */
- private ComponentName mMediaReceiverForCalls = null;
-
- /**
- * Helper function:
- * Display in the log the current entries in the remote control focus stack
- */
- private void dumpRCStack(PrintWriter pw) {
- pw.println("\nRemote Control stack entries (last is top of stack):");
- synchronized(mPRStack) {
- Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- stackIterator.next().dump(pw, true);
- }
- }
- }
-
- /**
- * Helper function:
- * Display in the log the current entries in the remote control stack, focusing
- * on RemoteControlClient data
- */
- private void dumpRCCStack(PrintWriter pw) {
- pw.println("\nRemote Control Client stack entries (last is top of stack):");
- synchronized(mPRStack) {
- Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- stackIterator.next().dump(pw, false);
- }
- synchronized(mCurrentRcLock) {
- pw.println("\nCurrent remote control generation ID = " + mCurrentRcClientGen);
- }
- }
- synchronized (mMainRemote) {
- pw.println("\nRemote Volume State:");
- pw.println(" has remote: " + mHasRemotePlayback);
- pw.println(" is remote active: " + mMainRemoteIsActive);
- pw.println(" rccId: " + mMainRemote.mRccId);
- pw.println(" volume handling: "
- + ((mMainRemote.mVolumeHandling == RemoteControlClient.PLAYBACK_VOLUME_FIXED) ?
- "PLAYBACK_VOLUME_FIXED(0)" : "PLAYBACK_VOLUME_VARIABLE(1)"));
- pw.println(" volume: " + mMainRemote.mVolume);
- pw.println(" volume steps: " + mMainRemote.mVolumeMax);
- }
- }
-
- /**
- * Helper function:
- * Display in the log the current entries in the list of remote control displays
- */
- private void dumpRCDList(PrintWriter pw) {
- pw.println("\nRemote Control Display list entries:");
- synchronized(mPRStack) {
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di = displayIterator.next();
- pw.println(" IRCD: " + di.mRcDisplay +
- " -- w:" + di.mArtworkExpectedWidth +
- " -- h:" + di.mArtworkExpectedHeight +
- " -- wantsPosSync:" + di.mWantsPositionSync +
- " -- " + (di.mEnabled ? "enabled" : "disabled"));
- }
- }
- }
-
- /**
- * Helper function:
- * Push the new media button receiver "near" the top of the PlayerRecord stack.
- * "Near the top" is defined as:
- * - at the top if the current PlayerRecord at the top is not playing
- * - below the entries at the top of the stack that correspond to the playing PlayerRecord
- * otherwise
- * Called synchronized on mPRStack
- * precondition: mediaIntent != null
- * @return true if the top of mPRStack was changed, false otherwise
- */
- private boolean pushMediaButtonReceiver_syncPrs(PendingIntent mediaIntent,
- ComponentName target, IBinder token) {
- if (mPRStack.empty()) {
- mPRStack.push(new PlayerRecord(mediaIntent, target, token));
- return true;
- } else if (mPRStack.peek().hasMatchingMediaButtonIntent(mediaIntent)) {
- // already at top of stack
- return false;
- }
- if (mAppOps.noteOp(AppOpsManager.OP_TAKE_MEDIA_BUTTONS, Binder.getCallingUid(),
- mediaIntent.getCreatorPackage()) != AppOpsManager.MODE_ALLOWED) {
- return false;
- }
- PlayerRecord oldTopPrse = mPRStack.lastElement(); // top of the stack before any changes
- boolean topChanged = false;
- PlayerRecord prse = null;
- int lastPlayingIndex = mPRStack.size();
- int inStackIndex = -1;
- try {
- // go through the stack from the top to figure out who's playing, and the position
- // of this media button receiver (note that it may not be in the stack)
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- prse = mPRStack.elementAt(index);
- if (prse.isPlaybackActive()) {
- lastPlayingIndex = index;
- }
- if (prse.hasMatchingMediaButtonIntent(mediaIntent)) {
- inStackIndex = index;
- }
- }
-
- if (inStackIndex == -1) {
- // is not in stack
- prse = new PlayerRecord(mediaIntent, target, token);
- // it's new so it's not playing (no RemoteControlClient to give a playstate),
- // therefore it goes after the ones with active playback
- mPRStack.add(lastPlayingIndex, prse);
- } else {
- // is in the stack
- if (mPRStack.size() > 1) { // no need to remove and add if stack contains only 1
- prse = mPRStack.elementAt(inStackIndex);
- // remove it from its old location in the stack
- mPRStack.removeElementAt(inStackIndex);
- if (prse.isPlaybackActive()) {
- // and put it at the top
- mPRStack.push(prse);
- } else {
- // and put it after the ones with active playback
- if (inStackIndex > lastPlayingIndex) {
- mPRStack.add(lastPlayingIndex, prse);
- } else {
- mPRStack.add(lastPlayingIndex - 1, prse);
- }
- }
- }
- }
-
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification or bad index
- Log.e(TAG, "Wrong index (inStack=" + inStackIndex + " lastPlaying=" + lastPlayingIndex
- + " size=" + mPRStack.size()
- + " accessing media button stack", e);
- }
-
- return (topChanged);
- }
-
- /**
- * Helper function:
- * Remove the remote control receiver from the RC focus stack.
- * Called synchronized on mPRStack
- * precondition: pi != null
- */
- private void removeMediaButtonReceiver_syncPrs(PendingIntent pi) {
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- if (prse.hasMatchingMediaButtonIntent(pi)) {
- prse.destroy();
- // ok to remove element while traversing the stack since we're leaving the loop
- mPRStack.removeElementAt(index);
- break;
- }
- }
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing media button stack, lock error? ", e);
- }
- }
-
- /**
- * Helper function:
- * Called synchronized on mPRStack
- */
- private boolean isCurrentRcController(PendingIntent pi) {
- if (!mPRStack.empty() && mPRStack.peek().hasMatchingMediaButtonIntent(pi)) {
- return true;
- }
- return false;
- }
-
- //==========================================================================================
- // Remote control display / client
- //==========================================================================================
- /**
- * Update the remote control displays with the new "focused" client generation
- */
- private void setNewRcClientOnDisplays_syncRcsCurrc(int newClientGeneration,
- PendingIntent newMediaIntent, boolean clearing) {
- synchronized(mPRStack) {
- if (mRcDisplays.size() > 0) {
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di = displayIterator.next();
- try {
- di.mRcDisplay.setCurrentClientId(
- newClientGeneration, newMediaIntent, clearing);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead display in setNewRcClientOnDisplays_syncRcsCurrc()",e);
- di.release();
- displayIterator.remove();
- }
- }
- }
- }
- }
-
- /**
- * Update the remote control clients with the new "focused" client generation
- */
- private void setNewRcClientGenerationOnClients_syncRcsCurrc(int newClientGeneration) {
- // (using an iterator on the stack so we can safely remove an entry if needed,
- // traversal order doesn't matter here as we update all entries)
- Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- PlayerRecord se = stackIterator.next();
- if ((se != null) && (se.getRcc() != null)) {
- try {
- se.getRcc().setCurrentClientGenerationId(newClientGeneration);
- } catch (RemoteException e) {
- Log.w(TAG, "Dead client in setNewRcClientGenerationOnClients_syncRcsCurrc()",e);
- stackIterator.remove();
- se.unlinkToRcClientDeath();
- }
- }
- }
- }
-
- /**
- * Update the displays and clients with the new "focused" client generation and name
- * @param newClientGeneration the new generation value matching a client update
- * @param newMediaIntent the media button event receiver associated with the client.
- * May be null, which implies there is no registered media button event receiver.
- * @param clearing true if the new client generation value maps to a remote control update
- * where the display should be cleared.
- */
- private void setNewRcClient_syncRcsCurrc(int newClientGeneration,
- PendingIntent newMediaIntent, boolean clearing) {
- // send the new valid client generation ID to all displays
- setNewRcClientOnDisplays_syncRcsCurrc(newClientGeneration, newMediaIntent, clearing);
- // send the new valid client generation ID to all clients
- setNewRcClientGenerationOnClients_syncRcsCurrc(newClientGeneration);
- }
-
- /**
- * Called when processing MSG_RCDISPLAY_CLEAR event
- */
- private void onRcDisplayClear() {
- if (DEBUG_RC) Log.i(TAG, "Clear remote control display");
-
- synchronized(mPRStack) {
- synchronized(mCurrentRcLock) {
- mCurrentRcClientGen++;
- // synchronously update the displays and clients with the new client generation
- setNewRcClient_syncRcsCurrc(mCurrentRcClientGen,
- null /*newMediaIntent*/, true /*clearing*/);
- }
- }
- }
-
- /**
- * Called when processing MSG_RCDISPLAY_UPDATE event
- */
- private void onRcDisplayUpdate(PlayerRecord prse, int flags /* USED ?*/) {
- synchronized(mPRStack) {
- synchronized(mCurrentRcLock) {
- if ((mCurrentRcClient != null) && (mCurrentRcClient.equals(prse.getRcc()))) {
- if (DEBUG_RC) Log.i(TAG, "Display/update remote control ");
-
- mCurrentRcClientGen++;
- // synchronously update the displays and clients with
- // the new client generation
- setNewRcClient_syncRcsCurrc(mCurrentRcClientGen,
- prse.getMediaButtonIntent() /*newMediaIntent*/,
- false /*clearing*/);
-
- // tell the current client that it needs to send info
- try {
- //TODO change name to informationRequestForAllDisplays()
- mCurrentRcClient.onInformationRequested(mCurrentRcClientGen, flags);
- } catch (RemoteException e) {
- Log.e(TAG, "Current valid remote client is dead: "+e);
- mCurrentRcClient = null;
- }
- } else {
- // the remote control display owner has changed between the
- // the message to update the display was sent, and the time it
- // gets to be processed (now)
- }
- }
- }
- }
-
- /**
- * Called when processing MSG_RCDISPLAY_INIT_INFO event
- * Causes the current RemoteControlClient to send its info (metadata, playstate...) to
- * a single RemoteControlDisplay, NOT all of them, as with MSG_RCDISPLAY_UPDATE.
- */
- private void onRcDisplayInitInfo(IRemoteControlDisplay newRcd, int w, int h) {
- synchronized(mPRStack) {
- synchronized(mCurrentRcLock) {
- if (mCurrentRcClient != null) {
- if (DEBUG_RC) { Log.i(TAG, "Init RCD with current info"); }
- try {
- // synchronously update the new RCD with the current client generation
- // and matching PendingIntent
- newRcd.setCurrentClientId(mCurrentRcClientGen, mCurrentRcClientIntent,
- false);
-
- // tell the current RCC that it needs to send info, but only to the new RCD
- try {
- mCurrentRcClient.informationRequestForDisplay(newRcd, w, h);
- } catch (RemoteException e) {
- Log.e(TAG, "Current valid remote client is dead: ", e);
- mCurrentRcClient = null;
- }
- } catch (RemoteException e) {
- Log.e(TAG, "Dead display in onRcDisplayInitInfo()", e);
- }
- }
- }
- }
- }
-
- /**
- * Helper function:
- * Called synchronized on mPRStack
- */
- private void clearRemoteControlDisplay_syncPrs() {
- synchronized(mCurrentRcLock) {
- mCurrentRcClient = null;
- }
- // will cause onRcDisplayClear() to be called in AudioService's handler thread
- mEventHandler.sendMessage( mEventHandler.obtainMessage(MSG_RCDISPLAY_CLEAR) );
- }
-
- /**
- * Helper function for code readability: only to be called from
- * checkUpdateRemoteControlDisplay_syncPrs() which checks the preconditions for
- * this method.
- * Preconditions:
- * - called synchronized on mPRStack
- * - mPRStack.isEmpty() is false
- */
- private void updateRemoteControlDisplay_syncPrs(int infoChangedFlags) {
- PlayerRecord prse = mPRStack.peek();
- int infoFlagsAboutToBeUsed = infoChangedFlags;
- // this is where we enforce opt-in for information display on the remote controls
- // with the new AudioManager.registerRemoteControlClient() API
- if (prse.getRcc() == null) {
- //Log.w(TAG, "Can't update remote control display with null remote control client");
- clearRemoteControlDisplay_syncPrs();
- return;
- }
- synchronized(mCurrentRcLock) {
- if (!prse.getRcc().equals(mCurrentRcClient)) {
- // new RC client, assume every type of information shall be queried
- infoFlagsAboutToBeUsed = RC_INFO_ALL;
- }
- mCurrentRcClient = prse.getRcc();
- mCurrentRcClientIntent = prse.getMediaButtonIntent();
- }
- // will cause onRcDisplayUpdate() to be called in AudioService's handler thread
- mEventHandler.sendMessage( mEventHandler.obtainMessage(MSG_RCDISPLAY_UPDATE,
- infoFlagsAboutToBeUsed /* arg1 */, 0, prse /* obj, != null */) );
- }
-
- /**
- * Helper function:
- * Called synchronized on mPRStack
- * Check whether the remote control display should be updated, triggers the update if required
- * @param infoChangedFlags the flags corresponding to the remote control client information
- * that has changed, if applicable (checking for the update conditions might trigger a
- * clear, rather than an update event).
- */
- private void checkUpdateRemoteControlDisplay_syncPrs(int infoChangedFlags) {
- // determine whether the remote control display should be refreshed
- // if the player record stack is empty, there is nothing to display, so clear the RC display
- if (mPRStack.isEmpty()) {
- clearRemoteControlDisplay_syncPrs();
- return;
- }
-
- // this is where more rules for refresh go
-
- // refresh conditions were verified: update the remote controls
- // ok to call: synchronized on mPRStack, mPRStack is not empty
- updateRemoteControlDisplay_syncPrs(infoChangedFlags);
- }
-
- /**
- * see AudioManager.registerMediaButtonIntent(PendingIntent pi, ComponentName c)
- * precondition: mediaIntent != null
- */
- protected void registerMediaButtonIntent(PendingIntent mediaIntent, ComponentName eventReceiver,
- IBinder token) {
- Log.i(TAG, " Remote Control registerMediaButtonIntent() for " + mediaIntent);
-
- synchronized(mPRStack) {
- if (pushMediaButtonReceiver_syncPrs(mediaIntent, eventReceiver, token)) {
- // new RC client, assume every type of information shall be queried
- checkUpdateRemoteControlDisplay_syncPrs(RC_INFO_ALL);
- }
- }
- }
-
- /**
- * see AudioManager.unregisterMediaButtonIntent(PendingIntent mediaIntent)
- * precondition: mediaIntent != null, eventReceiver != null
- */
- protected void unregisterMediaButtonIntent(PendingIntent mediaIntent)
- {
- Log.i(TAG, " Remote Control unregisterMediaButtonIntent() for " + mediaIntent);
-
- synchronized(mPRStack) {
- boolean topOfStackWillChange = isCurrentRcController(mediaIntent);
- removeMediaButtonReceiver_syncPrs(mediaIntent);
- if (topOfStackWillChange) {
- // current RC client will change, assume every type of info needs to be queried
- checkUpdateRemoteControlDisplay_syncPrs(RC_INFO_ALL);
- }
- }
- }
-
- protected void unregisterMediaButtonIntentAsync(final PendingIntent mediaIntent) {
- mEventHandler.sendMessage(
- mEventHandler.obtainMessage(MSG_UNREGISTER_MEDIABUTTONINTENT, 0, 0,
- mediaIntent));
- }
-
- /**
- * see AudioManager.registerMediaButtonEventReceiverForCalls(ComponentName c)
- * precondition: c != null
- */
- protected void registerMediaButtonEventReceiverForCalls(ComponentName c) {
- if (mContext.checkCallingPermission("android.permission.MODIFY_PHONE_STATE")
- != PackageManager.PERMISSION_GRANTED) {
- Log.e(TAG, "Invalid permissions to register media button receiver for calls");
- return;
- }
- synchronized(mPRStack) {
- mMediaReceiverForCalls = c;
- }
- }
-
- /**
- * see AudioManager.unregisterMediaButtonEventReceiverForCalls()
- */
- protected void unregisterMediaButtonEventReceiverForCalls() {
- if (mContext.checkCallingPermission("android.permission.MODIFY_PHONE_STATE")
- != PackageManager.PERMISSION_GRANTED) {
- Log.e(TAG, "Invalid permissions to unregister media button receiver for calls");
- return;
- }
- synchronized(mPRStack) {
- mMediaReceiverForCalls = null;
- }
- }
-
- /**
- * see AudioManager.registerRemoteControlClient(ComponentName eventReceiver, ...)
- * @return the unique ID of the PlayerRecord associated with the RemoteControlClient
- * Note: using this method with rcClient == null is a way to "disable" the IRemoteControlClient
- * without modifying the RC stack, but while still causing the display to refresh (will
- * become blank as a result of this)
- */
- protected int registerRemoteControlClient(PendingIntent mediaIntent,
- IRemoteControlClient rcClient, String callingPackageName) {
- if (DEBUG_RC) Log.i(TAG, "Register remote control client rcClient="+rcClient);
- int rccId = RemoteControlClient.RCSE_ID_UNREGISTERED;
- synchronized(mPRStack) {
- // store the new display information
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- if(prse.hasMatchingMediaButtonIntent(mediaIntent)) {
- prse.resetControllerInfoForRcc(rcClient, callingPackageName,
- Binder.getCallingUid());
-
- if (rcClient == null) {
- break;
- }
-
- rccId = prse.getRccId();
-
- // there is a new (non-null) client:
- // give the new client the displays (if any)
- if (mRcDisplays.size() > 0) {
- plugRemoteControlDisplaysIntoClient_syncPrs(prse.getRcc());
- }
- break;
- }
- }//for
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing RC stack, lock error? ", e);
- }
-
- // if the eventReceiver is at the top of the stack
- // then check for potential refresh of the remote controls
- if (isCurrentRcController(mediaIntent)) {
- checkUpdateRemoteControlDisplay_syncPrs(RC_INFO_ALL);
- }
- }//synchronized(mPRStack)
- return rccId;
- }
-
- /**
- * see AudioManager.unregisterRemoteControlClient(PendingIntent pi, ...)
- * rcClient is guaranteed non-null
- */
- protected void unregisterRemoteControlClient(PendingIntent mediaIntent,
- IRemoteControlClient rcClient) {
- if (DEBUG_RC) Log.i(TAG, "Unregister remote control client rcClient="+rcClient);
- synchronized(mPRStack) {
- boolean topRccChange = false;
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- if ((prse.hasMatchingMediaButtonIntent(mediaIntent))
- && rcClient.equals(prse.getRcc())) {
- // we found the IRemoteControlClient to unregister
- prse.resetControllerInfoForNoRcc();
- topRccChange = (index == mPRStack.size()-1);
- // there can only be one matching RCC in the RC stack, we're done
- break;
- }
- }
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing RC stack, lock error? ", e);
- }
- if (topRccChange) {
- // no more RCC for the RCD, check for potential refresh of the remote controls
- checkUpdateRemoteControlDisplay_syncPrs(RC_INFO_ALL);
- }
- }
- }
-
-
- /**
- * A class to encapsulate all the information about a remote control display.
- * After instanciation, init() must always be called before the object is added in the list
- * of displays.
- * Before being removed from the list of displays, release() must always be called (otherwise
- * it will leak death handlers).
- */
- private class DisplayInfoForServer implements IBinder.DeathRecipient {
- /** may never be null */
- private final IRemoteControlDisplay mRcDisplay;
- private final IBinder mRcDisplayBinder;
- private int mArtworkExpectedWidth = -1;
- private int mArtworkExpectedHeight = -1;
- private boolean mWantsPositionSync = false;
- private ComponentName mClientNotifListComp;
- private boolean mEnabled = true;
-
- public DisplayInfoForServer(IRemoteControlDisplay rcd, int w, int h) {
- if (DEBUG_RC) Log.i(TAG, "new DisplayInfoForServer for " + rcd + " w=" + w + " h=" + h);
- mRcDisplay = rcd;
- mRcDisplayBinder = rcd.asBinder();
- mArtworkExpectedWidth = w;
- mArtworkExpectedHeight = h;
- }
-
- public boolean init() {
- try {
- mRcDisplayBinder.linkToDeath(this, 0);
- } catch (RemoteException e) {
- // remote control display is DOA, disqualify it
- Log.w(TAG, "registerRemoteControlDisplay() has a dead client " + mRcDisplayBinder);
- return false;
- }
- return true;
- }
-
- public void release() {
- try {
- mRcDisplayBinder.unlinkToDeath(this, 0);
- } catch (java.util.NoSuchElementException e) {
- // not much we can do here, the display should have been unregistered anyway
- Log.e(TAG, "Error in DisplaInfoForServer.relase()", e);
- }
- }
-
- public void binderDied() {
- synchronized(mPRStack) {
- Log.w(TAG, "RemoteControl: display " + mRcDisplay + " died");
- // remove the display from the list
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di = displayIterator.next();
- if (di.mRcDisplay == mRcDisplay) {
- if (DEBUG_RC) Log.w(TAG, " RCD removed from list");
- displayIterator.remove();
- return;
- }
- }
- }
- }
- }
-
- /**
- * The remote control displays.
- * Access synchronized on mPRStack
- */
- private ArrayList<DisplayInfoForServer> mRcDisplays = new ArrayList<DisplayInfoForServer>(1);
-
- /**
- * Plug each registered display into the specified client
- * @param rcc, guaranteed non null
- */
- private void plugRemoteControlDisplaysIntoClient_syncPrs(IRemoteControlClient rcc) {
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di = displayIterator.next();
- try {
- rcc.plugRemoteControlDisplay(di.mRcDisplay, di.mArtworkExpectedWidth,
- di.mArtworkExpectedHeight);
- if (di.mWantsPositionSync) {
- rcc.setWantsSyncForDisplay(di.mRcDisplay, true);
- }
- } catch (RemoteException e) {
- Log.e(TAG, "Error connecting RCD to RCC in RCC registration",e);
- }
- }
- }
-
- private void enableRemoteControlDisplayForClient_syncRcStack(IRemoteControlDisplay rcd,
- boolean enabled) {
- // let all the remote control clients know whether the given display is enabled
- // (so the remote control stack traversal order doesn't matter).
- final Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- PlayerRecord prse = stackIterator.next();
- if(prse.getRcc() != null) {
- try {
- prse.getRcc().enableRemoteControlDisplay(rcd, enabled);
- } catch (RemoteException e) {
- Log.e(TAG, "Error connecting RCD to client: ", e);
- }
- }
- }
- }
-
- /**
- * Is the remote control display interface already registered
- * @param rcd
- * @return true if the IRemoteControlDisplay is already in the list of displays
- */
- private boolean rcDisplayIsPluggedIn_syncRcStack(IRemoteControlDisplay rcd) {
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di = displayIterator.next();
- if (di.mRcDisplay.asBinder().equals(rcd.asBinder())) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Register an IRemoteControlDisplay.
- * Notify all IRemoteControlClient of the new display and cause the RemoteControlClient
- * at the top of the stack to update the new display with its information.
- * @see android.media.IAudioService#registerRemoteControlDisplay(android.media.IRemoteControlDisplay, int, int)
- * @param rcd the IRemoteControlDisplay to register. No effect if null.
- * @param w the maximum width of the expected bitmap. Negative or zero values indicate this
- * display doesn't need to receive artwork.
- * @param h the maximum height of the expected bitmap. Negative or zero values indicate this
- * display doesn't need to receive artwork.
- * @param listenerComp the component for the listener interface, may be null if it's not needed
- * to verify it belongs to one of the enabled notification listeners
- */
- private void registerRemoteControlDisplay_int(IRemoteControlDisplay rcd, int w, int h,
- ComponentName listenerComp) {
- if (DEBUG_RC) Log.d(TAG, ">>> registerRemoteControlDisplay("+rcd+")");
- synchronized(mAudioFocusLock) {
- synchronized(mPRStack) {
- if ((rcd == null) || rcDisplayIsPluggedIn_syncRcStack(rcd)) {
- return;
- }
- DisplayInfoForServer di = new DisplayInfoForServer(rcd, w, h);
- di.mEnabled = true;
- di.mClientNotifListComp = listenerComp;
- if (!di.init()) {
- if (DEBUG_RC) Log.e(TAG, " error registering RCD");
- return;
- }
- // add RCD to list of displays
- mRcDisplays.add(di);
-
- // let all the remote control clients know there is a new display (so the remote
- // control stack traversal order doesn't matter).
- Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- PlayerRecord prse = stackIterator.next();
- if(prse.getRcc() != null) {
- try {
- prse.getRcc().plugRemoteControlDisplay(rcd, w, h);
- } catch (RemoteException e) {
- Log.e(TAG, "Error connecting RCD to client: ", e);
- }
- }
- }
-
- // we have a new display, of which all the clients are now aware: have it be
- // initialized wih the current gen ID and the current client info, do not
- // reset the information for the other (existing) displays
- sendMsg(mEventHandler, MSG_RCDISPLAY_INIT_INFO, SENDMSG_QUEUE,
- w /*arg1*/, h /*arg2*/,
- rcd /*obj*/, 0/*delay*/);
- }
- }
- }
-
- /**
- * Unregister an IRemoteControlDisplay.
- * No effect if the IRemoteControlDisplay hasn't been successfully registered.
- * @see android.media.IAudioService#unregisterRemoteControlDisplay(android.media.IRemoteControlDisplay)
- * @param rcd the IRemoteControlDisplay to unregister. No effect if null.
- */
- protected void unregisterRemoteControlDisplay(IRemoteControlDisplay rcd) {
- if (DEBUG_RC) Log.d(TAG, "<<< unregisterRemoteControlDisplay("+rcd+")");
- synchronized(mPRStack) {
- if (rcd == null) {
- return;
- }
-
- boolean displayWasPluggedIn = false;
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext() && !displayWasPluggedIn) {
- final DisplayInfoForServer di = displayIterator.next();
- if (di.mRcDisplay.asBinder().equals(rcd.asBinder())) {
- displayWasPluggedIn = true;
- di.release();
- displayIterator.remove();
- }
- }
-
- if (displayWasPluggedIn) {
- // disconnect this remote control display from all the clients, so the remote
- // control stack traversal order doesn't matter
- final Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- final PlayerRecord prse = stackIterator.next();
- if(prse.getRcc() != null) {
- try {
- prse.getRcc().unplugRemoteControlDisplay(rcd);
- } catch (RemoteException e) {
- Log.e(TAG, "Error disconnecting remote control display to client: ", e);
- }
- }
- }
- } else {
- if (DEBUG_RC) Log.w(TAG, " trying to unregister unregistered RCD");
- }
- }
- }
-
- /**
- * Update the size of the artwork used by an IRemoteControlDisplay.
- * @see android.media.IAudioService#remoteControlDisplayUsesBitmapSize(android.media.IRemoteControlDisplay, int, int)
- * @param rcd the IRemoteControlDisplay with the new artwork size requirement
- * @param w the maximum width of the expected bitmap. Negative or zero values indicate this
- * display doesn't need to receive artwork.
- * @param h the maximum height of the expected bitmap. Negative or zero values indicate this
- * display doesn't need to receive artwork.
- */
- protected void remoteControlDisplayUsesBitmapSize(IRemoteControlDisplay rcd, int w, int h) {
- synchronized(mPRStack) {
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- boolean artworkSizeUpdate = false;
- while (displayIterator.hasNext() && !artworkSizeUpdate) {
- final DisplayInfoForServer di = displayIterator.next();
- if (di.mRcDisplay.asBinder().equals(rcd.asBinder())) {
- if ((di.mArtworkExpectedWidth != w) || (di.mArtworkExpectedHeight != h)) {
- di.mArtworkExpectedWidth = w;
- di.mArtworkExpectedHeight = h;
- artworkSizeUpdate = true;
- }
- }
- }
- if (artworkSizeUpdate) {
- // RCD is currently plugged in and its artwork size has changed, notify all RCCs,
- // stack traversal order doesn't matter
- final Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while(stackIterator.hasNext()) {
- final PlayerRecord prse = stackIterator.next();
- if(prse.getRcc() != null) {
- try {
- prse.getRcc().setBitmapSizeForDisplay(rcd, w, h);
- } catch (RemoteException e) {
- Log.e(TAG, "Error setting bitmap size for RCD on RCC: ", e);
- }
- }
- }
- }
- }
- }
-
- /**
- * Controls whether a remote control display needs periodic checks of the RemoteControlClient
- * playback position to verify that the estimated position has not drifted from the actual
- * position. By default the check is not performed.
- * The IRemoteControlDisplay must have been previously registered for this to have any effect.
- * @param rcd the IRemoteControlDisplay for which the anti-drift mechanism will be enabled
- * or disabled. Not null.
- * @param wantsSync if true, RemoteControlClient instances which expose their playback position
- * to the framework will regularly compare the estimated playback position with the actual
- * position, and will update the IRemoteControlDisplay implementation whenever a drift is
- * detected.
- */
- protected void remoteControlDisplayWantsPlaybackPositionSync(IRemoteControlDisplay rcd,
- boolean wantsSync) {
- synchronized(mPRStack) {
- boolean rcdRegistered = false;
- // store the information about this display
- // (display stack traversal order doesn't matter).
- final Iterator<DisplayInfoForServer> displayIterator = mRcDisplays.iterator();
- while (displayIterator.hasNext()) {
- final DisplayInfoForServer di = displayIterator.next();
- if (di.mRcDisplay.asBinder().equals(rcd.asBinder())) {
- di.mWantsPositionSync = wantsSync;
- rcdRegistered = true;
- break;
- }
- }
- if (!rcdRegistered) {
- return;
- }
- // notify all current RemoteControlClients
- // (stack traversal order doesn't matter as we notify all RCCs)
- final Iterator<PlayerRecord> stackIterator = mPRStack.iterator();
- while (stackIterator.hasNext()) {
- final PlayerRecord prse = stackIterator.next();
- if (prse.getRcc() != null) {
- try {
- prse.getRcc().setWantsSyncForDisplay(rcd, wantsSync);
- } catch (RemoteException e) {
- Log.e(TAG, "Error setting position sync flag for RCD on RCC: ", e);
- }
- }
- }
- }
- }
-
- // handler for MSG_RCC_NEW_VOLUME_OBS
- private void onRegisterVolumeObserverForRcc(int rccId, IRemoteVolumeObserver rvo) {
- synchronized(mPRStack) {
- // The stack traversal order doesn't matter because there is only one stack entry
- // with this RCC ID, but the matching ID is more likely at the top of the stack, so
- // start iterating from the top.
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- if (prse.getRccId() == rccId) {
- prse.mRemoteVolumeObs = rvo;
- break;
- }
- }
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing media button stack, lock error? ", e);
- }
- }
- }
-
- /**
- * Checks if a remote client is active on the supplied stream type. Update the remote stream
- * volume state if found and playing
- * @param streamType
- * @return false if no remote playing is currently playing
- */
- protected boolean checkUpdateRemoteStateIfActive(int streamType) {
- synchronized(mPRStack) {
- // iterating from top of stack as active playback is more likely on entries at the top
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- if ((prse.mPlaybackType == RemoteControlClient.PLAYBACK_TYPE_REMOTE)
- && isPlaystateActive(prse.mPlaybackState.mState)
- && (prse.mPlaybackStream == streamType)) {
- if (DEBUG_RC) Log.d(TAG, "remote playback active on stream " + streamType
- + ", vol =" + prse.mPlaybackVolume);
- synchronized (mMainRemote) {
- mMainRemote.mRccId = prse.getRccId();
- mMainRemote.mVolume = prse.mPlaybackVolume;
- mMainRemote.mVolumeMax = prse.mPlaybackVolumeMax;
- mMainRemote.mVolumeHandling = prse.mPlaybackVolumeHandling;
- mMainRemoteIsActive = true;
- }
- return true;
- }
- }
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing RC stack, lock error? ", e);
- }
- }
- synchronized (mMainRemote) {
- mMainRemoteIsActive = false;
- }
- return false;
- }
-
- /**
- * Returns true if the given playback state is considered "active", i.e. it describes a state
- * where playback is happening, or about to
- * @param playState the playback state to evaluate
- * @return true if active, false otherwise (inactive or unknown)
- */
- protected static boolean isPlaystateActive(int playState) {
- switch (playState) {
- case RemoteControlClient.PLAYSTATE_PLAYING:
- case RemoteControlClient.PLAYSTATE_BUFFERING:
- case RemoteControlClient.PLAYSTATE_FAST_FORWARDING:
- case RemoteControlClient.PLAYSTATE_REWINDING:
- case RemoteControlClient.PLAYSTATE_SKIPPING_BACKWARDS:
- case RemoteControlClient.PLAYSTATE_SKIPPING_FORWARDS:
- return true;
- default:
- return false;
- }
- }
-
- private void sendVolumeUpdateToRemote(int rccId, int direction) {
- if (DEBUG_VOL) { Log.d(TAG, "sendVolumeUpdateToRemote(rccId="+rccId+" , dir="+direction); }
- if (direction == 0) {
- // only handling discrete events
- return;
- }
- IRemoteVolumeObserver rvo = null;
- synchronized (mPRStack) {
- // The stack traversal order doesn't matter because there is only one stack entry
- // with this RCC ID, but the matching ID is more likely at the top of the stack, so
- // start iterating from the top.
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- //FIXME OPTIMIZE store this info in mMainRemote so we don't have to iterate?
- if (prse.getRccId() == rccId) {
- rvo = prse.mRemoteVolumeObs;
- break;
- }
- }
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing media button stack, lock error? ", e);
- }
- }
- if (rvo != null) {
- try {
- rvo.dispatchRemoteVolumeUpdate(direction, -1);
- } catch (RemoteException e) {
- Log.e(TAG, "Error dispatching relative volume update", e);
- }
- }
- }
-
- protected int getRemoteStreamMaxVolume() {
- synchronized (mMainRemote) {
- if (mMainRemote.mRccId == RemoteControlClient.RCSE_ID_UNREGISTERED) {
- return 0;
- }
- return mMainRemote.mVolumeMax;
- }
- }
-
- protected int getRemoteStreamVolume() {
- synchronized (mMainRemote) {
- if (mMainRemote.mRccId == RemoteControlClient.RCSE_ID_UNREGISTERED) {
- return 0;
- }
- return mMainRemote.mVolume;
- }
- }
-
- protected void setRemoteStreamVolume(int vol) {
- if (DEBUG_VOL) { Log.d(TAG, "setRemoteStreamVolume(vol="+vol+")"); }
- int rccId = RemoteControlClient.RCSE_ID_UNREGISTERED;
- synchronized (mMainRemote) {
- if (mMainRemote.mRccId == RemoteControlClient.RCSE_ID_UNREGISTERED) {
- return;
- }
- rccId = mMainRemote.mRccId;
- }
- IRemoteVolumeObserver rvo = null;
- synchronized (mPRStack) {
- // The stack traversal order doesn't matter because there is only one stack entry
- // with this RCC ID, but the matching ID is more likely at the top of the stack, so
- // start iterating from the top.
- try {
- for (int index = mPRStack.size()-1; index >= 0; index--) {
- final PlayerRecord prse = mPRStack.elementAt(index);
- //FIXME OPTIMIZE store this info in mMainRemote so we don't have to iterate?
- if (prse.getRccId() == rccId) {
- rvo = prse.mRemoteVolumeObs;
- break;
- }
- }
- } catch (ArrayIndexOutOfBoundsException e) {
- // not expected to happen, indicates improper concurrent modification
- Log.e(TAG, "Wrong index accessing media button stack, lock error? ", e);
- }
- }
- if (rvo != null) {
- try {
- rvo.dispatchRemoteVolumeUpdate(0, vol);
- } catch (RemoteException e) {
- Log.e(TAG, "Error dispatching absolute volume update", e);
- }
- }
- }
-
- /**
- * Call to make AudioService reevaluate whether it's in a mode where remote players should
- * have their volume controlled. In this implementation this is only to reset whether
- * VolumePanel should display remote volumes
- */
- protected void postReevaluateRemote() {
- sendMsg(mEventHandler, MSG_REEVALUATE_REMOTE, SENDMSG_QUEUE, 0, 0, null, 0);
- }
-
- private void onReevaluateRemote() {
- // TODO This was used to notify VolumePanel if there was remote playback
- // in the stack. This is now in MediaSessionService. More code should be
- // removed.
- }
-
-}
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index f547afb..b2fa0ac 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -106,9 +106,6 @@ public final class MediaFormat {
public static final String MIMETYPE_AUDIO_FLAC = "audio/flac";
public static final String MIMETYPE_AUDIO_MSGSM = "audio/gsm";
public static final String MIMETYPE_AUDIO_AC3 = "audio/ac3";
- /**
- * @hide
- */
public static final String MIMETYPE_AUDIO_EAC3 = "audio/eac3";
/**
@@ -232,11 +229,22 @@ public final class MediaFormat {
public static final String KEY_TEMPORAL_LAYERING = "ts-schema";
/**
- * @hide
+ * A key describing the stride of the video bytebuffer layout.
+ * Stride (or row increment) is the difference between the index of a pixel
+ * and that of the pixel directly underneath. For YUV 420 formats, the
+ * stride corresponds to the Y plane; the stride of the U and V planes can
+ * be calculated based on the color format.
+ * The associated value is an integer, representing number of bytes.
*/
public static final String KEY_STRIDE = "stride";
+
/**
- * @hide
+ * A key describing the plane height of a multi-planar (YUV) video bytebuffer layout.
+ * Slice height (or plane height) is the number of rows that must be skipped to get
+ * from the top of the Y plane to the top of the U plane in the bytebuffer. In essence
+ * the offset of the U plane is sliceHeight * stride. The height of the U/V planes
+ * can be calculated based on the color format.
+ * The associated value is an integer, representing number of rows.
*/
public static final String KEY_SLICE_HEIGHT = "slice-height";
@@ -420,15 +428,74 @@ public final class MediaFormat {
public static final String KEY_QUALITY = "quality";
/**
+ * A key describing the desired codec priority.
+ * <p>
+ * The associated value is an integer. Higher value means lower priority.
+ * <p>
+ * Currently, only two levels are supported:<br>
+ * 0: realtime priority - meaning that the codec shall support the given
+ * performance configuration (e.g. framerate) at realtime. This should
+ * only be used by media playback, capture, and possibly by realtime
+ * communication scenarios if best effort performance is not suitable.<br>
+ * 1: non-realtime priority (best effort).
+ * <p>
+ * This is a hint used at codec configuration and resource planning - to understand
+ * the realtime requirements of the application; however, due to the nature of
+ * media components, performance is not guaranteed.
+ *
+ */
+ public static final String KEY_PRIORITY = "priority";
+
+ /**
+ * A key describing the desired operating frame rate for video or sample rate for audio
+ * that the codec will need to operate at.
+ * <p>
+ * The associated value is an integer or a float representing frames-per-second or
+ * samples-per-second
+ * <p>
+ * This is used for cases like high-speed/slow-motion video capture, where the video encoder
+ * format contains the target playback rate (e.g. 30fps), but the component must be able to
+ * handle the high operating capture rate (e.g. 240fps).
+ * <p>
+ * This rate will be used by codec for resource planning and setting the operating points.
+ *
+ */
+ public static final String KEY_OPERATING_RATE = "operating-rate";
+
+ /**
* A key describing the desired profile to be used by an encoder.
+ * The associated value is an integer.
* Constants are declared in {@link MediaCodecInfo.CodecProfileLevel}.
- * This key is only supported for codecs that specify a profile.
+ * This key is used as a hint, and is only supported for codecs
+ * that specify a profile.
*
* @see MediaCodecInfo.CodecCapabilities#profileLevels
*/
public static final String KEY_PROFILE = "profile";
/**
+ * A key describing the desired profile to be used by an encoder.
+ * The associated value is an integer.
+ * Constants are declared in {@link MediaCodecInfo.CodecProfileLevel}.
+ * This key is used as a further hint when specifying a desired profile,
+ * and is only supported for codecs that specify a level.
+ * <p>
+ * This key is ignored if the {@link #KEY_PROFILE profile} is not specified.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_LEVEL = "level";
+
+ /**
+ * A key describing the desired clockwise rotation on an output surface.
+ * This key is only used when the codec is configured using an output surface.
+ * The associated value is an integer, representing degrees.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_ROTATION = "rotation-degrees";
+
+ /**
* A key describing the desired bitrate mode to be used by an encoder.
* Constants are declared in {@link MediaCodecInfo.CodecCapabilities}.
*
diff --git a/media/java/android/media/MediaHTTPConnection.java b/media/java/android/media/MediaHTTPConnection.java
index 541d871..d6bf421 100644
--- a/media/java/android/media/MediaHTTPConnection.java
+++ b/media/java/android/media/MediaHTTPConnection.java
@@ -43,6 +43,9 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
private static final String TAG = "MediaHTTPConnection";
private static final boolean VERBOSE = false;
+ // connection timeout - 30 sec
+ private static final int CONNECT_TIMEOUT_MS = 30 * 1000;
+
private long mCurrentOffset = -1;
private URL mURL = null;
private Map<String, String> mHeaders = null;
@@ -182,6 +185,7 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
} else {
mConnection = (HttpURLConnection)url.openConnection();
}
+ mConnection.setConnectTimeout(CONNECT_TIMEOUT_MS);
// handle redirects ourselves if we do not allow cross-domain redirect
mConnection.setInstanceFollowRedirects(mAllowCrossDomainRedirect);
@@ -341,7 +345,7 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
} catch (UnknownServiceException e) {
Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
return MEDIA_ERROR_UNSUPPORTED;
- }catch (IOException e) {
+ } catch (IOException e) {
if (VERBOSE) {
Log.d(TAG, "readAt " + offset + " / " + size + " => -1");
}
diff --git a/media/java/android/media/MediaHTTPService.java b/media/java/android/media/MediaHTTPService.java
index 3b4703d..2348ab7 100644
--- a/media/java/android/media/MediaHTTPService.java
+++ b/media/java/android/media/MediaHTTPService.java
@@ -16,9 +16,7 @@
package android.media;
-import android.os.Binder;
import android.os.IBinder;
-import android.util.Log;
/** @hide */
public class MediaHTTPService extends IMediaHTTPService.Stub {
diff --git a/media/java/android/media/MediaMetadata.java b/media/java/android/media/MediaMetadata.java
index 754da0e..39bcef5 100644
--- a/media/java/android/media/MediaMetadata.java
+++ b/media/java/android/media/MediaMetadata.java
@@ -30,7 +30,6 @@ import android.util.ArrayMap;
import android.util.Log;
import android.util.SparseArray;
-import java.util.ArrayList;
import java.util.Set;
/**
diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java
index 9a69c06..a3ff080 100644
--- a/media/java/android/media/MediaMetadataRetriever.java
+++ b/media/java/android/media/MediaMetadataRetriever.java
@@ -203,7 +203,20 @@ public class MediaMetadataRetriever
}
/**
- * Call this method after setDataSource(). This method retrieves the
+ * Sets the data source (MediaDataSource) to use.
+ *
+ * @param dataSource the MediaDataSource for the media you want to play
+ */
+ public void setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException {
+ _setDataSource(dataSource);
+ }
+
+ private native void _setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException;
+
+ /**
+ * Call this method after setDataSource(). This method retrieves the
* meta data value associated with the keyCode.
*
* The keyCode currently supported is listed below as METADATA_XXX
@@ -498,5 +511,11 @@ public class MediaMetadataRetriever
* The video rotation angle may be 0, 90, 180, or 270 degrees.
*/
public static final int METADATA_KEY_VIDEO_ROTATION = 24;
+ /**
+ * This key retrieves the original capture framerate, if it's
+ * available. The capture framerate will be a floating point
+ * number.
+ */
+ public static final int METADATA_KEY_CAPTURE_FRAMERATE = 25;
// Add more here...
}
diff --git a/media/java/android/media/MediaMuxer.java b/media/java/android/media/MediaMuxer.java
index f518ab2..4b6b4fa 100644
--- a/media/java/android/media/MediaMuxer.java
+++ b/media/java/android/media/MediaMuxer.java
@@ -16,12 +16,18 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import dalvik.system.CloseGuard;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.RandomAccessFile;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.Map;
@@ -80,19 +86,27 @@ final public class MediaMuxer {
public static final int MUXER_OUTPUT_WEBM = 1;
};
+ /** @hide */
+ @IntDef({
+ OutputFormat.MUXER_OUTPUT_MPEG_4,
+ OutputFormat.MUXER_OUTPUT_WEBM,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Format {}
+
// All the native functions are listed here.
- private static native long nativeSetup(FileDescriptor fd, int format);
+ private static native long nativeSetup(@NonNull FileDescriptor fd, int format);
private static native void nativeRelease(long nativeObject);
private static native void nativeStart(long nativeObject);
private static native void nativeStop(long nativeObject);
- private static native int nativeAddTrack(long nativeObject, String[] keys,
- Object[] values);
- private static native void nativeSetOrientationHint(long nativeObject,
- int degrees);
+ private static native int nativeAddTrack(
+ long nativeObject, @NonNull String[] keys, @NonNull Object[] values);
+ private static native void nativeSetOrientationHint(
+ long nativeObject, int degrees);
private static native void nativeSetLocation(long nativeObject, int latitude, int longitude);
- private static native void nativeWriteSampleData(long nativeObject,
- int trackIndex, ByteBuffer byteBuf,
- int offset, int size, long presentationTimeUs, int flags);
+ private static native void nativeWriteSampleData(
+ long nativeObject, int trackIndex, @NonNull ByteBuffer byteBuf,
+ int offset, int size, long presentationTimeUs, @MediaCodec.BufferFlag int flags);
// Muxer internal states.
private static final int MUXER_STATE_UNINITIALIZED = -1;
@@ -115,7 +129,7 @@ final public class MediaMuxer {
* @see android.media.MediaMuxer.OutputFormat
* @throws IOException if failed to open the file for write
*/
- public MediaMuxer(String path, int format) throws IOException {
+ public MediaMuxer(@NonNull String path, @Format int format) throws IOException {
if (path == null) {
throw new IllegalArgumentException("path must not be null");
}
@@ -246,11 +260,12 @@ final public class MediaMuxer {
/**
* Adds a track with the specified format.
- * @param format The media format for the track.
+ * @param format The media format for the track. This must not be an empty
+ * MediaFormat.
* @return The track index for this newly added track, and it should be used
* in the {@link #writeSampleData}.
*/
- public int addTrack(MediaFormat format) {
+ public int addTrack(@NonNull MediaFormat format) {
if (format == null) {
throw new IllegalArgumentException("format must not be null.");
}
@@ -302,8 +317,8 @@ final public class MediaMuxer {
* MediaMuxer uses the flags provided in {@link MediaCodec.BufferInfo},
* to signal sync frames.
*/
- public void writeSampleData(int trackIndex, ByteBuffer byteBuf,
- BufferInfo bufferInfo) {
+ public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf,
+ @NonNull BufferInfo bufferInfo) {
if (trackIndex < 0 || trackIndex > mLastTrackIndex) {
throw new IllegalArgumentException("trackIndex is invalid");
}
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index 615dac2..668f80a 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -16,14 +16,13 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.app.ActivityThread;
import android.app.AppOpsManager;
-import android.app.Application;
-import android.content.BroadcastReceiver;
import android.content.ContentResolver;
import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
import android.content.res.AssetFileDescriptor;
import android.net.Uri;
import android.os.Handler;
@@ -41,16 +40,20 @@ import android.provider.Settings;
import android.system.ErrnoException;
import android.system.OsConstants;
import android.util.Log;
+import android.util.Pair;
import android.view.Surface;
import android.view.SurfaceHolder;
+import android.widget.VideoView;
import android.graphics.SurfaceTexture;
import android.media.AudioManager;
import android.media.MediaFormat;
import android.media.MediaTimeProvider;
+import android.media.PlaybackParams;
import android.media.SubtitleController;
import android.media.SubtitleController.Anchor;
import android.media.SubtitleData;
import android.media.SubtitleTrack.RenderingWidget;
+import android.media.SyncParams;
import com.android.internal.app.IAppOpsService;
@@ -64,13 +67,17 @@ import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
import java.lang.Runnable;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.net.InetSocketAddress;
+import java.util.BitSet;
+import java.util.HashSet;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.Vector;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.lang.ref.WeakReference;
/**
@@ -183,7 +190,8 @@ import java.lang.ref.WeakReference;
* {@link #setDataSource(FileDescriptor)}, or
* {@link #setDataSource(String)}, or
* {@link #setDataSource(Context, Uri)}, or
- * {@link #setDataSource(FileDescriptor, long, long)} transfers a
+ * {@link #setDataSource(FileDescriptor, long, long)}, or
+ * {@link #setDataSource(MediaDataSource)} transfers a
* MediaPlayer object in the <em>Idle</em> state to the
* <em>Initialized</em> state.
* <ul>
@@ -472,6 +480,16 @@ import java.lang.ref.WeakReference;
* <td>{} </p></td>
* <td>This method can be called in any state and calling it does not change
* the object state. </p></td></tr>
+ * <tr><td>setPlaybackRate</p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setPlaybackParams</p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
* <tr><td>setScreenOnWhilePlaying</></td>
* <td>any </p></td>
* <td>{} </p></td>
@@ -604,6 +622,10 @@ public class MediaPlayer implements SubtitleController.Listener
private final IAppOpsService mAppOps;
private int mStreamType = AudioManager.USE_DEFAULT_STREAM_TYPE;
private int mUsage = -1;
+ private boolean mBypassInterruptionPolicy;
+
+ // use AtomicBoolean instead of boolean so we can use the same member both as a flag and a lock.
+ private AtomicBoolean mPreparing = new AtomicBoolean();
/**
* Default constructor. Consider using one of the create() methods for
@@ -624,9 +646,7 @@ public class MediaPlayer implements SubtitleController.Listener
}
mTimeProvider = new TimeProvider(this);
- mOutOfBandSubtitleTracks = new Vector<SubtitleTrack>();
mOpenSubtitleSources = new Vector<InputStream>();
- mInbandSubtitleTracks = new SubtitleTrack[0];
IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
mAppOps = IAppOpsService.Stub.asInterface(b);
@@ -1123,6 +1143,20 @@ public class MediaPlayer implements SubtitleController.Listener
throws IOException, IllegalArgumentException, IllegalStateException;
/**
+ * Sets the data source (MediaDataSource) to use.
+ *
+ * @param dataSource the MediaDataSource for the media you want to play
+ * @throws IllegalStateException if it is called in an invalid state
+ */
+ public void setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException, IllegalStateException {
+ _setDataSource(dataSource);
+ }
+
+ private native void _setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException, IllegalStateException;
+
+ /**
* Prepares the player for playback, synchronously.
*
* After setting the datasource and the display surface, you need to either
@@ -1132,6 +1166,10 @@ public class MediaPlayer implements SubtitleController.Listener
* @throws IllegalStateException if it is called in an invalid state
*/
public void prepare() throws IOException, IllegalStateException {
+ // The synchronous version of prepare also receives a MEDIA_PREPARED message.
+ synchronized (mPreparing) {
+ mPreparing.set(true);
+ }
_prepare();
scanInternalSubtitleTracks();
}
@@ -1148,7 +1186,14 @@ public class MediaPlayer implements SubtitleController.Listener
*
* @throws IllegalStateException if it is called in an invalid state
*/
- public native void prepareAsync() throws IllegalStateException;
+ public void prepareAsync() throws IllegalStateException {
+ synchronized (mPreparing) {
+ mPreparing.set(true);
+ }
+ _prepareAsync();
+ }
+
+ private native void _prepareAsync() throws IllegalStateException;
/**
* Starts or resumes playback. If playback had previously been paused,
@@ -1169,6 +1214,9 @@ public class MediaPlayer implements SubtitleController.Listener
private native void _start() throws IllegalStateException;
private boolean isRestricted() {
+ if (mBypassInterruptionPolicy) {
+ return false;
+ }
try {
final int usage = mUsage != -1 ? mUsage
: AudioAttributes.usageForLegacyStreamType(getAudioStreamType());
@@ -1196,6 +1244,9 @@ public class MediaPlayer implements SubtitleController.Listener
* initialized.
*/
public void stop() throws IllegalStateException {
+ synchronized (mPreparing) {
+ mPreparing.set(false);
+ }
stayAwake(false);
_stop();
}
@@ -1320,6 +1371,135 @@ public class MediaPlayer implements SubtitleController.Listener
public native boolean isPlaying();
/**
+ * Change playback speed of audio by resampling the audio.
+ * <p>
+ * Specifies resampling as audio mode for variable rate playback, i.e.,
+ * resample the waveform based on the requested playback rate to get
+ * a new waveform, and play back the new waveform at the original sampling
+ * frequency.
+ * When rate is larger than 1.0, pitch becomes higher.
+ * When rate is smaller than 1.0, pitch becomes lower.
+ *
+ * @hide
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;
+
+ /**
+ * Change playback speed of audio without changing its pitch.
+ * <p>
+ * Specifies time stretching as audio mode for variable rate playback.
+ * Time stretching changes the duration of the audio samples without
+ * affecting its pitch.
+ * <p>
+ * This mode is only supported for a limited range of playback speed factors,
+ * e.g. between 1/2x and 2x.
+ *
+ * @hide
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;
+
+ /**
+ * Change playback speed of audio without changing its pitch, and
+ * possibly mute audio if time stretching is not supported for the playback
+ * speed.
+ * <p>
+ * Try to keep audio pitch when changing the playback rate, but allow the
+ * system to determine how to change audio playback if the rate is out
+ * of range.
+ *
+ * @hide
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
+ PLAYBACK_RATE_AUDIO_MODE_STRETCH,
+ PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PlaybackRateAudioMode {}
+
+ /**
+ * Sets playback rate and audio mode.
+ *
+ * @param rate the ratio between desired playback rate and normal one.
+ * @param audioMode audio playback mode. Must be one of the supported
+ * audio modes.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if audioMode is not supported.
+ *
+ * @hide
+ */
+ @NonNull
+ public PlaybackParams easyPlaybackParams(float rate, @PlaybackRateAudioMode int audioMode) {
+ PlaybackParams params = new PlaybackParams();
+ params.allowDefaults();
+ switch (audioMode) {
+ case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
+ params.setSpeed(rate).setPitch(1.0f);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
+ params.setSpeed(rate).setPitch(1.0f)
+ .setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_FAIL);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
+ params.setSpeed(rate).setPitch(rate);
+ break;
+ default:
+ final String msg = "Audio playback mode " + audioMode + " is not supported";
+ throw new IllegalArgumentException(msg);
+ }
+ return params;
+ }
+
+ /**
+ * Sets playback rate using {@link PlaybackParams}.
+ *
+ * @param params the playback params.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if params is not supported.
+ */
+ public native void setPlaybackParams(@NonNull PlaybackParams params);
+
+ /**
+ * Gets the playback params, containing the current playback rate.
+ *
+ * @return the playback params.
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native PlaybackParams getPlaybackParams();
+
+ /**
+ * Sets A/V sync mode.
+ *
+ * @param params the A/V sync params to apply
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if params are not supported.
+ */
+ public native void setSyncParams(@NonNull SyncParams params);
+
+ /**
+ * Gets the A/V sync mode.
+ *
+ * @return the A/V sync params
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native SyncParams getSyncParams();
+
+ /**
* Seeks to specified time position.
*
* @param msec the offset in milliseconds from the start to seek to
@@ -1329,6 +1509,38 @@ public class MediaPlayer implements SubtitleController.Listener
public native void seekTo(int msec) throws IllegalStateException;
/**
+ * Get current playback position as a {@link MediaTimestamp}.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion using an anchor and a clock rate. During regular playback, the media
+ * time moves fairly constantly (though the anchor frame may be rebased to a current
+ * system time, the linear correlation stays steady). Therefore, this method does not
+ * need to be called often.
+ * <p>
+ * To help users get current playback position, this method always anchors the timestamp
+ * to the current {@link System#nanoTime system time}, so
+ * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media player has not been initialized.
+ *
+ * @see MediaTimestamp
+ */
+ @Nullable
+ public MediaTimestamp getTimestamp()
+ {
+ try {
+ // TODO: get the timestamp from native side
+ return new MediaTimestamp(
+ getCurrentPosition() * 1000L,
+ System.nanoTime(),
+ isPlaying() ? getPlaybackParams().getSpeed() : 0.f);
+ } catch (IllegalStateException e) {
+ return null;
+ }
+ }
+
+ /**
* Gets the current playback position.
*
* @return the current position in milliseconds
@@ -1464,6 +1676,9 @@ public class MediaPlayer implements SubtitleController.Listener
* at the same time.
*/
public void release() {
+ synchronized (mPreparing) {
+ mPreparing.set(false);
+ }
stayAwake(false);
updateSurfaceScreenOn();
mOnPreparedListener = null;
@@ -1490,6 +1705,9 @@ public class MediaPlayer implements SubtitleController.Listener
* data source and calling prepare().
*/
public void reset() {
+ synchronized (mPreparing) {
+ mPreparing.set(false);
+ }
mSelectedSubtitleTrackIndex = -1;
synchronized(mOpenSubtitleSources) {
for (final InputStream is: mOpenSubtitleSources) {
@@ -1500,8 +1718,6 @@ public class MediaPlayer implements SubtitleController.Listener
}
mOpenSubtitleSources.clear();
}
- mOutOfBandSubtitleTracks.clear();
- mInbandSubtitleTracks = new SubtitleTrack[0];
if (mSubtitleController != null) {
mSubtitleController.reset();
}
@@ -1516,6 +1732,11 @@ public class MediaPlayer implements SubtitleController.Listener
if (mEventHandler != null) {
mEventHandler.removeCallbacksAndMessages(null);
}
+
+ synchronized (mIndexTrackPairs) {
+ mIndexTrackPairs.clear();
+ mInbandTrackIndices.clear();
+ };
}
private native void _reset();
@@ -1560,6 +1781,8 @@ public class MediaPlayer implements SubtitleController.Listener
throw new IllegalArgumentException(msg);
}
mUsage = attributes.getUsage();
+ mBypassInterruptionPolicy = (attributes.getAllFlags()
+ & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0;
Parcel pattributes = Parcel.obtain();
attributes.writeToParcel(pattributes, AudioAttributes.FLATTEN_TAGS);
setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, pattributes);
@@ -1760,27 +1983,23 @@ public class MediaPlayer implements SubtitleController.Listener
public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
+ public static final int MEDIA_TRACK_TYPE_METADATA = 5;
final int mTrackType;
final MediaFormat mFormat;
TrackInfo(Parcel in) {
mTrackType = in.readInt();
- // TODO: parcel in the full MediaFormat
+ // TODO: parcel in the full MediaFormat; currently we are using createSubtitleFormat
+ // even for audio/video tracks, meaning we only set the mime and language.
+ String mime = in.readString();
String language = in.readString();
+ mFormat = MediaFormat.createSubtitleFormat(mime, language);
- if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
- mFormat = MediaFormat.createSubtitleFormat(
- MEDIA_MIMETYPE_TEXT_SUBRIP, language);
- } else if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
- String mime = in.readString();
- mFormat = MediaFormat.createSubtitleFormat(mime, language);
+ if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
mFormat.setInteger(MediaFormat.KEY_IS_AUTOSELECT, in.readInt());
mFormat.setInteger(MediaFormat.KEY_IS_DEFAULT, in.readInt());
mFormat.setInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE, in.readInt());
- } else {
- mFormat = new MediaFormat();
- mFormat.setString(MediaFormat.KEY_LANGUAGE, language);
}
}
@@ -1859,6 +2078,16 @@ public class MediaPlayer implements SubtitleController.Listener
};
+ // We would like domain specific classes with more informative names than the `first` and `second`
+ // in generic Pair, but we would also like to avoid creating new/trivial classes. As a compromise
+ // we document the meanings of `first` and `second` here:
+ //
+ // Pair.first - inband track index; non-null iff representing an inband track.
+ // Pair.second - a SubtitleTrack registered with mSubtitleController; non-null iff representing
+ // an inband subtitle track or any out-of-band track (subtitle or timedtext).
+ private Vector<Pair<Integer, SubtitleTrack>> mIndexTrackPairs = new Vector<>();
+ private BitSet mInbandTrackIndices = new BitSet();
+
/**
* Returns an array of track information.
*
@@ -1870,17 +2099,20 @@ public class MediaPlayer implements SubtitleController.Listener
public TrackInfo[] getTrackInfo() throws IllegalStateException {
TrackInfo trackInfo[] = getInbandTrackInfo();
// add out-of-band tracks
- TrackInfo allTrackInfo[] = new TrackInfo[trackInfo.length + mOutOfBandSubtitleTracks.size()];
- System.arraycopy(trackInfo, 0, allTrackInfo, 0, trackInfo.length);
- int i = trackInfo.length;
- for (SubtitleTrack track: mOutOfBandSubtitleTracks) {
- int type = track.isTimedText()
- ? TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT
- : TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE;
- allTrackInfo[i] = new TrackInfo(type, track.getFormat());
- ++i;
+ synchronized (mIndexTrackPairs) {
+ TrackInfo allTrackInfo[] = new TrackInfo[mIndexTrackPairs.size()];
+ for (int i = 0; i < allTrackInfo.length; i++) {
+ Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
+ if (p.first != null) {
+ // inband track
+ allTrackInfo[i] = trackInfo[p.first];
+ } else {
+ SubtitleTrack track = p.second;
+ allTrackInfo[i] = new TrackInfo(track.getTrackType(), track.getFormat());
+ }
+ }
+ return allTrackInfo;
}
- return allTrackInfo;
}
private TrackInfo[] getInbandTrackInfo() throws IllegalStateException {
@@ -1939,22 +2171,58 @@ public class MediaPlayer implements SubtitleController.Listener
mSubtitleController.setAnchor(anchor);
}
- private final Object mInbandSubtitleLock = new Object();
- private SubtitleTrack[] mInbandSubtitleTracks;
+ /**
+ * The private version of setSubtitleAnchor is used internally to set mSubtitleController if
+ * necessary when clients don't provide their own SubtitleControllers using the public version
+ * {@link #setSubtitleAnchor(SubtitleController, Anchor)} (e.g. {@link VideoView} provides one).
+ */
+ private synchronized void setSubtitleAnchor() {
+ if (mSubtitleController == null) {
+ final HandlerThread thread = new HandlerThread("SetSubtitleAnchorThread");
+ thread.start();
+ Handler handler = new Handler(thread.getLooper());
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ Context context = ActivityThread.currentApplication();
+ mSubtitleController = new SubtitleController(context, mTimeProvider, MediaPlayer.this);
+ mSubtitleController.setAnchor(new Anchor() {
+ @Override
+ public void setSubtitleWidget(RenderingWidget subtitleWidget) {
+ }
+
+ @Override
+ public Looper getSubtitleLooper() {
+ return Looper.getMainLooper();
+ }
+ });
+ thread.getLooper().quitSafely();
+ }
+ });
+ try {
+ thread.join();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ Log.w(TAG, "failed to join SetSubtitleAnchorThread");
+ }
+ }
+ }
+
private int mSelectedSubtitleTrackIndex = -1;
- private Vector<SubtitleTrack> mOutOfBandSubtitleTracks;
private Vector<InputStream> mOpenSubtitleSources;
private OnSubtitleDataListener mSubtitleDataListener = new OnSubtitleDataListener() {
@Override
public void onSubtitleData(MediaPlayer mp, SubtitleData data) {
int index = data.getTrackIndex();
- if (index >= mInbandSubtitleTracks.length) {
- return;
- }
- SubtitleTrack track = mInbandSubtitleTracks[index];
- if (track != null) {
- track.onData(data);
+ synchronized (mIndexTrackPairs) {
+ for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
+ if (p.first != null && p.first == index && p.second != null) {
+ // inband subtitle track that owns data
+ SubtitleTrack track = p.second;
+ track.onData(data);
+ }
+ }
}
}
};
@@ -1973,18 +2241,24 @@ public class MediaPlayer implements SubtitleController.Listener
if (track == null) {
return;
}
- for (int i = 0; i < mInbandSubtitleTracks.length; i++) {
- if (mInbandSubtitleTracks[i] == track) {
- Log.v(TAG, "Selecting subtitle track " + i);
- mSelectedSubtitleTrackIndex = i;
- try {
- selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, true);
- } catch (IllegalStateException e) {
+
+ synchronized (mIndexTrackPairs) {
+ for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
+ if (p.first != null && p.second == track) {
+ // inband subtitle track that is selected
+ mSelectedSubtitleTrackIndex = p.first;
+ break;
}
- setOnSubtitleDataListener(mSubtitleDataListener);
- break;
}
}
+
+ if (mSelectedSubtitleTrackIndex >= 0) {
+ try {
+ selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, true);
+ } catch (IllegalStateException e) {
+ }
+ setOnSubtitleDataListener(mSubtitleDataListener);
+ }
// no need to select out-of-band tracks
}
@@ -2024,7 +2298,9 @@ public class MediaPlayer implements SubtitleController.Listener
mOpenSubtitleSources.remove(fIs);
}
scanner.close();
- mOutOfBandSubtitleTracks.add(track);
+ synchronized (mIndexTrackPairs) {
+ mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
+ }
track.onData(contents.getBytes(), true /* eos */, ~0 /* runID: keep forever */);
return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
}
@@ -2042,27 +2318,37 @@ public class MediaPlayer implements SubtitleController.Listener
private void scanInternalSubtitleTracks() {
if (mSubtitleController == null) {
- Log.e(TAG, "Should have subtitle controller already set");
- return;
+ Log.w(TAG, "setSubtitleAnchor in MediaPlayer");
+ setSubtitleAnchor();
}
+ populateInbandTracks();
+
+ if (mSubtitleController != null) {
+ mSubtitleController.selectDefaultTrack();
+ }
+ }
+
+ private void populateInbandTracks() {
TrackInfo[] tracks = getInbandTrackInfo();
- synchronized (mInbandSubtitleLock) {
- SubtitleTrack[] inbandTracks = new SubtitleTrack[tracks.length];
- for (int i=0; i < tracks.length; i++) {
+ synchronized (mIndexTrackPairs) {
+ for (int i = 0; i < tracks.length; i++) {
+ if (mInbandTrackIndices.get(i)) {
+ continue;
+ } else {
+ mInbandTrackIndices.set(i);
+ }
+
+ // newly appeared inband track
if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
- if (i < mInbandSubtitleTracks.length) {
- inbandTracks[i] = mInbandSubtitleTracks[i];
- } else {
- SubtitleTrack track = mSubtitleController.addTrack(
- tracks[i].getFormat());
- inbandTracks[i] = track;
- }
+ SubtitleTrack track = mSubtitleController.addTrack(
+ tracks[i].getFormat());
+ mIndexTrackPairs.add(Pair.create(i, track));
+ } else {
+ mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(i, null));
}
}
- mInbandSubtitleTracks = inbandTracks;
}
- mSubtitleController.selectDefaultTrack();
}
/* TODO: Limit the total number of external timed text source to a reasonable number.
@@ -2199,28 +2485,20 @@ public class MediaPlayer implements SubtitleController.Listener
fFormat.setString(MediaFormat.KEY_MIME, mime);
fFormat.setInteger(MediaFormat.KEY_IS_TIMED_TEXT, 1);
- Context context = ActivityThread.currentApplication();
// A MediaPlayer created by a VideoView should already have its mSubtitleController set.
if (mSubtitleController == null) {
- mSubtitleController = new SubtitleController(context, mTimeProvider, this);
- mSubtitleController.setAnchor(new Anchor() {
- @Override
- public void setSubtitleWidget(RenderingWidget subtitleWidget) {
- }
-
- @Override
- public Looper getSubtitleLooper() {
- return Looper.getMainLooper();
- }
- });
+ setSubtitleAnchor();
}
if (!mSubtitleController.hasRendererFor(fFormat)) {
// test and add not atomic
+ Context context = ActivityThread.currentApplication();
mSubtitleController.registerRenderer(new SRTRenderer(context, mEventHandler));
}
final SubtitleTrack track = mSubtitleController.addTrack(fFormat);
- mOutOfBandSubtitleTracks.add(track);
+ synchronized (mIndexTrackPairs) {
+ mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
+ }
final FileDescriptor fd3 = fd2;
final long offset2 = offset;
@@ -2292,12 +2570,18 @@ public class MediaPlayer implements SubtitleController.Listener
* @see #deselectTrack(int)
*/
public int getSelectedTrack(int trackType) throws IllegalStateException {
- if (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE && mSubtitleController != null) {
+ if (mSubtitleController != null
+ && (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE
+ || trackType == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT)) {
SubtitleTrack subtitleTrack = mSubtitleController.getSelectedTrack();
if (subtitleTrack != null) {
- int index = mOutOfBandSubtitleTracks.indexOf(subtitleTrack);
- if (index >= 0) {
- return mInbandSubtitleTracks.length + index;
+ synchronized (mIndexTrackPairs) {
+ for (int i = 0; i < mIndexTrackPairs.size(); i++) {
+ Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
+ if (p.second == subtitleTrack && subtitleTrack.getTrackType() == trackType) {
+ return i;
+ }
+ }
}
}
}
@@ -2309,8 +2593,16 @@ public class MediaPlayer implements SubtitleController.Listener
request.writeInt(INVOKE_ID_GET_SELECTED_TRACK);
request.writeInt(trackType);
invoke(request, reply);
- int selectedTrack = reply.readInt();
- return selectedTrack;
+ int inbandTrackIndex = reply.readInt();
+ synchronized (mIndexTrackPairs) {
+ for (int i = 0; i < mIndexTrackPairs.size(); i++) {
+ Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
+ if (p.first != null && p.first == inbandTrackIndex) {
+ return i;
+ }
+ }
+ }
+ return -1;
} finally {
request.recycle();
reply.recycle();
@@ -2370,36 +2662,30 @@ public class MediaPlayer implements SubtitleController.Listener
private void selectOrDeselectTrack(int index, boolean select)
throws IllegalStateException {
// handle subtitle track through subtitle controller
- SubtitleTrack track = null;
- synchronized (mInbandSubtitleLock) {
- if (mInbandSubtitleTracks.length == 0) {
- TrackInfo[] tracks = getInbandTrackInfo();
- mInbandSubtitleTracks = new SubtitleTrack[tracks.length];
- for (int i=0; i < tracks.length; i++) {
- if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
- mInbandSubtitleTracks[i] = mSubtitleController.addTrack(tracks[i].getFormat());
- }
- }
- }
+ populateInbandTracks();
+
+ Pair<Integer,SubtitleTrack> p = null;
+ try {
+ p = mIndexTrackPairs.get(index);
+ } catch (ArrayIndexOutOfBoundsException e) {
+ // ignore bad index
+ return;
}
- if (index < mInbandSubtitleTracks.length) {
- track = mInbandSubtitleTracks[index];
- } else if (index < mInbandSubtitleTracks.length + mOutOfBandSubtitleTracks.size()) {
- track = mOutOfBandSubtitleTracks.get(index - mInbandSubtitleTracks.length);
+ SubtitleTrack track = p.second;
+ if (track == null) {
+ // inband (de)select
+ selectOrDeselectInbandTrack(p.first, select);
+ return;
}
- if (mSubtitleController != null && track != null) {
- if (select) {
- if (track.isTimedText()) {
- int ttIndex = getSelectedTrack(TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
- if (ttIndex >= 0 && ttIndex < mInbandSubtitleTracks.length) {
- // deselect inband counterpart
- selectOrDeselectInbandTrack(ttIndex, false);
- }
- }
- mSubtitleController.selectTrack(track);
- } else if (mSubtitleController.getSelectedTrack() == track) {
+ if (mSubtitleController == null) {
+ return;
+ }
+
+ if (!select) {
+ // out-of-band deselect
+ if (mSubtitleController.getSelectedTrack() == track) {
mSubtitleController.selectTrack(null);
} else {
Log.w(TAG, "trying to deselect track that was not selected");
@@ -2407,7 +2693,20 @@ public class MediaPlayer implements SubtitleController.Listener
return;
}
- selectOrDeselectInbandTrack(index, select);
+ // out-of-band select
+ if (track.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ int ttIndex = getSelectedTrack(TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
+ synchronized (mIndexTrackPairs) {
+ if (ttIndex >= 0 && ttIndex < mIndexTrackPairs.size()) {
+ Pair<Integer,SubtitleTrack> p2 = mIndexTrackPairs.get(ttIndex);
+ if (p2.first != null && p2.second == null) {
+ // deselect inband counterpart
+ selectOrDeselectInbandTrack(p2.first, false);
+ }
+ }
+ }
+ }
+ mSubtitleController.selectTrack(track);
}
private void selectOrDeselectInbandTrack(int index, boolean select)
@@ -2500,6 +2799,7 @@ public class MediaPlayer implements SubtitleController.Listener
private static final int MEDIA_ERROR = 100;
private static final int MEDIA_INFO = 200;
private static final int MEDIA_SUBTITLE_DATA = 201;
+ private static final int MEDIA_META_DATA = 202;
private TimeProvider mTimeProvider;
@@ -2528,7 +2828,12 @@ public class MediaPlayer implements SubtitleController.Listener
}
switch(msg.what) {
case MEDIA_PREPARED:
- scanInternalSubtitleTracks();
+ synchronized (mPreparing) {
+ if (mPreparing.get()) {
+ scanInternalSubtitleTracks();
+ mPreparing.set(false);
+ }
+ }
if (mOnPreparedListener != null)
mOnPreparedListener.onPrepared(mMediaPlayer);
return;
@@ -2540,15 +2845,21 @@ public class MediaPlayer implements SubtitleController.Listener
return;
case MEDIA_STOPPED:
- if (mTimeProvider != null) {
- mTimeProvider.onStopped();
+ {
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onStopped();
+ }
}
break;
case MEDIA_STARTED:
case MEDIA_PAUSED:
- if (mTimeProvider != null) {
- mTimeProvider.onPaused(msg.what == MEDIA_PAUSED);
+ {
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onPaused(msg.what == MEDIA_PAUSED);
+ }
}
break;
@@ -2558,21 +2869,26 @@ public class MediaPlayer implements SubtitleController.Listener
return;
case MEDIA_SEEK_COMPLETE:
- if (mOnSeekCompleteListener != null) {
- mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
- }
- // fall through
+ if (mOnSeekCompleteListener != null) {
+ mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
+ }
+ // fall through
case MEDIA_SKIPPED:
- if (mTimeProvider != null) {
- mTimeProvider.onSeekComplete(mMediaPlayer);
- }
- return;
+ {
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onSeekComplete(mMediaPlayer);
+ }
+ }
+ return;
case MEDIA_SET_VIDEO_SIZE:
- if (mOnVideoSizeChangedListener != null)
- mOnVideoSizeChangedListener.onVideoSizeChanged(mMediaPlayer, msg.arg1, msg.arg2);
- return;
+ if (mOnVideoSizeChangedListener != null) {
+ mOnVideoSizeChangedListener.onVideoSizeChanged(
+ mMediaPlayer, msg.arg1, msg.arg2);
+ }
+ return;
case MEDIA_ERROR:
Log.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")");
@@ -2602,6 +2918,13 @@ public class MediaPlayer implements SubtitleController.Listener
mSubtitleController.selectDefaultTrack();
}
break;
+ case MEDIA_INFO_BUFFERING_START:
+ case MEDIA_INFO_BUFFERING_END:
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onBuffering(msg.arg1 == MEDIA_INFO_BUFFERING_START);
+ }
+ break;
}
if (mOnInfoListener != null) {
@@ -2636,6 +2959,18 @@ public class MediaPlayer implements SubtitleController.Listener
}
return;
+ case MEDIA_META_DATA:
+ if (mOnTimedMetaDataAvailableListener == null) {
+ return;
+ }
+ if (msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel) msg.obj;
+ TimedMetaData data = TimedMetaData.createTimedMetaDataFromParcel(parcel);
+ parcel.recycle();
+ mOnTimedMetaDataAvailableListener.onTimedMetaDataAvailable(mMediaPlayer, data);
+ }
+ return;
+
case MEDIA_NOP: // interface test message - ignore
break;
@@ -2872,6 +3207,46 @@ public class MediaPlayer implements SubtitleController.Listener
private OnSubtitleDataListener mOnSubtitleDataListener;
+ /**
+ * Interface definition of a callback to be invoked when a
+ * track has timed metadata available.
+ *
+ * @see MediaPlayer#setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener)
+ */
+ public interface OnTimedMetaDataAvailableListener
+ {
+ /**
+         * Called to indicate available timed metadata
+ * <p>
+ * This method will be called as timed metadata is extracted from the media,
+ * in the same order as it occurs in the media. The timing of this event is
+ * not controlled by the associated timestamp.
+ *
+ * @param mp the MediaPlayer associated with this callback
+ * @param data the timed metadata sample associated with this event
+ */
+ public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData data);
+ }
+
+ /**
+ * Register a callback to be invoked when a selected track has timed metadata available.
+ * <p>
+     * Currently only HTTP live streaming data URIs embedded with timed ID3 tags generate
+ * {@link TimedMetaData}.
+ *
+ * @see MediaPlayer#selectTrack(int)
+ * @see MediaPlayer.OnTimedMetaDataAvailableListener
+ * @see TimedMetaData
+ *
+ * @param listener the callback that will be run
+ */
+ public void setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener listener)
+ {
+ mOnTimedMetaDataAvailableListener = listener;
+ }
+
+ private OnTimedMetaDataAvailableListener mOnTimedMetaDataAvailableListener;
+
/* Do not change these values without updating their counterparts
* in include/media/mediaplayer.h!
*/
@@ -3088,6 +3463,7 @@ public class MediaPlayer implements SubtitleController.Listener
private MediaPlayer mPlayer;
private boolean mPaused = true;
private boolean mStopped = true;
+ private boolean mBuffering;
private long mLastReportedTime;
private long mTimeAdjustment;
// since we are expecting only a handful listeners per stream, there is
@@ -3181,12 +3557,22 @@ public class MediaPlayer implements SubtitleController.Listener
}
/** @hide */
+ public void onBuffering(boolean buffering) {
+ synchronized (this) {
+ if (DEBUG) Log.d(TAG, "onBuffering: " + buffering);
+ mBuffering = buffering;
+ scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
+ }
+ }
+
+ /** @hide */
public void onStopped() {
synchronized(this) {
if (DEBUG) Log.d(TAG, "onStopped");
mPaused = true;
mStopped = true;
mSeeking = false;
+ mBuffering = false;
scheduleNotification(NOTIFY_STOP, 0 /* delay */);
}
}
@@ -3207,6 +3593,7 @@ public class MediaPlayer implements SubtitleController.Listener
synchronized(this) {
mStopped = false;
mSeeking = true;
+ mBuffering = false;
scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
}
}
@@ -3409,7 +3796,7 @@ public class MediaPlayer implements SubtitleController.Listener
nanoTime >= mLastNanoTime + MAX_NS_WITHOUT_POSITION_CHECK) {
try {
mLastTimeUs = mPlayer.getCurrentPosition() * 1000L;
- mPaused = !mPlayer.isPlaying();
+ mPaused = !mPlayer.isPlaying() || mBuffering;
if (DEBUG) Log.v(TAG, (mPaused ? "paused" : "playing") + " at " + mLastTimeUs);
} catch (IllegalStateException e) {
if (mPausing) {
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index 81d5afe..ed2c4cbd 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -16,6 +16,8 @@
package android.media;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
import android.app.ActivityThread;
import android.hardware.Camera;
import android.os.Handler;
@@ -110,7 +112,8 @@ public class MediaRecorder
/* Native setup requires a weak reference to our object.
* It's easier to create it here than in C++.
*/
- native_setup(new WeakReference<MediaRecorder>(this), packageName);
+ native_setup(new WeakReference<MediaRecorder>(this), packageName,
+ ActivityThread.currentOpPackageName());
}
/**
@@ -139,6 +142,30 @@ public class MediaRecorder
public native Surface getSurface();
/**
+ * Configures the recorder to use a persistent surface when using SURFACE video source.
+ * <p> May only be called before {@link #prepare}. If called, {@link #getSurface} should
+ * not be used and will throw IllegalStateException. Frames rendered to the Surface
+ * before {@link #start} will be discarded.</p>
+
+ * @param surface a persistent input surface created by
+ * {@link MediaCodec#createPersistentInputSurface}
+ * @throws IllegalStateException if it is called after {@link #prepare} and before
+ * {@link #stop}.
+ * @throws IllegalArgumentException if the surface was not created by
+ * {@link MediaCodec#createPersistentInputSurface}.
+ * @see MediaCodec#createPersistentInputSurface
+ * @see MediaRecorder.VideoSource
+ */
+ public void setInputSurface(@NonNull Surface surface) {
+ if (!(surface instanceof MediaCodec.PersistentSurface)) {
+ throw new IllegalArgumentException("not a PersistentSurface");
+ }
+ native_setInputSurface(surface);
+ }
+
+ private native final void native_setInputSurface(@NonNull Surface surface);
+
+ /**
* Sets a Surface to show a preview of recorded media (video). Calls this
* before prepare() to make sure that the desirable preview display is
* set. If {@link #setCamera(Camera)} is used and the surface has been
@@ -156,8 +183,11 @@ public class MediaRecorder
}
/**
- * Defines the audio source. These constants are used with
- * {@link MediaRecorder#setAudioSource(int)}.
+ * Defines the audio source.
+ * An audio source defines both a default physical source of audio signal, and a recording
+ * configuration. These constants are for instance used
+ * in {@link MediaRecorder#setAudioSource(int)} or
+ * {@link AudioRecord.Builder#setAudioSource(int)}.
*/
public final class AudioSource {
@@ -167,7 +197,7 @@ public class MediaRecorder
public final static int AUDIO_SOURCE_INVALID = -1;
/* Do not change these values without updating their counterparts
- * in system/core/include/system/audio.h!
+ * in system/media/audio/include/system/audio.h!
*/
/** Default audio source **/
@@ -222,12 +252,11 @@ public class MediaRecorder
public static final int REMOTE_SUBMIX = 8;
/**
- * Audio source for FM, which is used to capture current FM tuner output by FMRadio app.
- * There are two use cases, one is for record FM stream for later listening, another is
- * for FM indirect mode(the routing except FM to headset(headphone) device routing).
+ * Audio source for capturing broadcast radio tuner output.
* @hide
*/
- public static final int FM_TUNER = 1998;
+ @SystemApi
+ public static final int RADIO_TUNER = 1998;
/**
* Audio source for preemptible, low-priority software hotword detection
@@ -240,7 +269,8 @@ public class MediaRecorder
* This is a hidden audio source.
* @hide
*/
- protected static final int HOTWORD = 1999;
+ @SystemApi
+ public static final int HOTWORD = 1999;
}
/**
@@ -437,10 +467,7 @@ public class MediaRecorder
public void setCaptureRate(double fps) {
// Make sure that time lapse is enabled when this method is called.
setParameter("time-lapse-enable=1");
-
- double timeBetweenFrameCapture = 1 / fps;
- long timeBetweenFrameCaptureUs = (long) (1000000 * timeBetweenFrameCapture);
- setParameter("time-between-time-lapse-frame-capture=" + timeBetweenFrameCaptureUs);
+ setParameter("time-lapse-fps=" + fps);
}
/**
@@ -1061,7 +1088,7 @@ public class MediaRecorder
private static native final void native_init();
private native final void native_setup(Object mediarecorder_this,
- String clientName) throws IllegalStateException;
+ String clientName, String opPackageName) throws IllegalStateException;
private native final void native_finalize();
diff --git a/media/java/android/media/MediaRouter.java b/media/java/android/media/MediaRouter.java
index 958ffab..a046512 100644
--- a/media/java/android/media/MediaRouter.java
+++ b/media/java/android/media/MediaRouter.java
@@ -17,6 +17,8 @@
package android.media;
import android.Manifest;
+import android.annotation.DrawableRes;
+import android.annotation.NonNull;
import android.app.ActivityThread;
import android.content.BroadcastReceiver;
import android.content.Context;
@@ -171,15 +173,15 @@ public class MediaRouter {
}
void updateAudioRoutes(AudioRoutesInfo newRoutes) {
- if (newRoutes.mMainType != mCurAudioRoutesInfo.mMainType) {
- mCurAudioRoutesInfo.mMainType = newRoutes.mMainType;
+ if (newRoutes.mainType != mCurAudioRoutesInfo.mainType) {
+ mCurAudioRoutesInfo.mainType = newRoutes.mainType;
int name;
- if ((newRoutes.mMainType&AudioRoutesInfo.MAIN_HEADPHONES) != 0
- || (newRoutes.mMainType&AudioRoutesInfo.MAIN_HEADSET) != 0) {
+ if ((newRoutes.mainType&AudioRoutesInfo.MAIN_HEADPHONES) != 0
+ || (newRoutes.mainType&AudioRoutesInfo.MAIN_HEADSET) != 0) {
name = com.android.internal.R.string.default_audio_route_name_headphones;
- } else if ((newRoutes.mMainType&AudioRoutesInfo.MAIN_DOCK_SPEAKERS) != 0) {
+ } else if ((newRoutes.mainType&AudioRoutesInfo.MAIN_DOCK_SPEAKERS) != 0) {
name = com.android.internal.R.string.default_audio_route_name_dock_speakers;
- } else if ((newRoutes.mMainType&AudioRoutesInfo.MAIN_HDMI) != 0) {
+ } else if ((newRoutes.mainType&AudioRoutesInfo.MAIN_HDMI) != 0) {
name = com.android.internal.R.string.default_media_route_name_hdmi;
} else {
name = com.android.internal.R.string.default_audio_route_name;
@@ -188,21 +190,21 @@ public class MediaRouter {
dispatchRouteChanged(sStatic.mDefaultAudioVideo);
}
- final int mainType = mCurAudioRoutesInfo.mMainType;
+ final int mainType = mCurAudioRoutesInfo.mainType;
- if (!TextUtils.equals(newRoutes.mBluetoothName, mCurAudioRoutesInfo.mBluetoothName)) {
- mCurAudioRoutesInfo.mBluetoothName = newRoutes.mBluetoothName;
- if (mCurAudioRoutesInfo.mBluetoothName != null) {
+ if (!TextUtils.equals(newRoutes.bluetoothName, mCurAudioRoutesInfo.bluetoothName)) {
+ mCurAudioRoutesInfo.bluetoothName = newRoutes.bluetoothName;
+ if (mCurAudioRoutesInfo.bluetoothName != null) {
if (sStatic.mBluetoothA2dpRoute == null) {
final RouteInfo info = new RouteInfo(sStatic.mSystemCategory);
- info.mName = mCurAudioRoutesInfo.mBluetoothName;
+ info.mName = mCurAudioRoutesInfo.bluetoothName;
info.mDescription = sStatic.mResources.getText(
com.android.internal.R.string.bluetooth_a2dp_audio_route_name);
info.mSupportedTypes = ROUTE_TYPE_LIVE_AUDIO;
sStatic.mBluetoothA2dpRoute = info;
addRouteStatic(sStatic.mBluetoothA2dpRoute);
} else {
- sStatic.mBluetoothA2dpRoute.mName = mCurAudioRoutesInfo.mBluetoothName;
+ sStatic.mBluetoothA2dpRoute.mName = mCurAudioRoutesInfo.bluetoothName;
dispatchRouteChanged(sStatic.mBluetoothA2dpRoute);
}
} else if (sStatic.mBluetoothA2dpRoute != null) {
@@ -881,8 +883,12 @@ public class MediaRouter {
* @param types type flags indicating which types this route should be used for.
* The route must support at least a subset.
* @param route Route to select
+ * @throws IllegalArgumentException if the given route is {@code null}
*/
- public void selectRoute(int types, RouteInfo route) {
+ public void selectRoute(int types, @NonNull RouteInfo route) {
+ if (route == null) {
+ throw new IllegalArgumentException("Route cannot be null.");
+ }
selectRouteStatic(types, route, true);
}
@@ -893,7 +899,8 @@ public class MediaRouter {
selectRouteStatic(types, route, explicit);
}
- static void selectRouteStatic(int types, RouteInfo route, boolean explicit) {
+ static void selectRouteStatic(int types, @NonNull RouteInfo route, boolean explicit) {
+ assert(route != null);
final RouteInfo oldRoute = sStatic.mSelectedRoute;
if (oldRoute == route) return;
if (!route.matchesTypes(types)) {
@@ -916,7 +923,7 @@ public class MediaRouter {
final WifiDisplay activeDisplay =
sStatic.mDisplayService.getWifiDisplayStatus().getActiveDisplay();
final boolean oldRouteHasAddress = oldRoute != null && oldRoute.mDeviceAddress != null;
- final boolean newRouteHasAddress = route != null && route.mDeviceAddress != null;
+ final boolean newRouteHasAddress = route.mDeviceAddress != null;
if (activeDisplay != null || oldRouteHasAddress || newRouteHasAddress) {
if (newRouteHasAddress && !matchesDeviceAddress(activeDisplay, route)) {
if (sStatic.mCanConfigureWifiDisplays) {
@@ -1499,18 +1506,18 @@ public class MediaRouter {
/**
* The default playback type, "local", indicating the presentation of the media is happening
- * on the same device (e.g. a phone, a tablet) as where it is controlled from.
+ * on the same device (e&#46;g&#46; a phone, a tablet) as where it is controlled from.
* @see #getPlaybackType()
*/
public final static int PLAYBACK_TYPE_LOCAL = 0;
/**
* A playback type indicating the presentation of the media is happening on
- * a different device (i.e. the remote device) than where it is controlled from.
+ * a different device (i&#46;e&#46; the remote device) than where it is controlled from.
* @see #getPlaybackType()
*/
public final static int PLAYBACK_TYPE_REMOTE = 1;
/**
- * Playback information indicating the playback volume is fixed, i.e. it cannot be
+ * Playback information indicating the playback volume is fixed, i&#46;e&#46; it cannot be
* controlled from this object. An example of fixed playback volume is a remote player,
* playing over HDMI where the user prefers to control the volume on the HDMI sink, rather
* than attenuate at the source.
@@ -2083,7 +2090,7 @@ public class MediaRouter {
*
* @param resId Resource ID of an icon drawable to use to represent this route
*/
- public void setIconResource(int resId) {
+ public void setIconResource(@DrawableRes int resId) {
setIconDrawable(sStatic.mResources.getDrawable(resId));
}
@@ -2393,7 +2400,7 @@ public class MediaRouter {
*
* @param resId Resource ID of an icon drawable to use to represent this group
*/
- public void setIconResource(int resId) {
+ public void setIconResource(@DrawableRes int resId) {
setIconDrawable(sStatic.mResources.getDrawable(resId));
}
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index 1255276..9ea6722 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -553,15 +553,8 @@ public class MediaScanner
boolean isimage = MediaFile.isImageFileType(mFileType);
if (isaudio || isvideo || isimage) {
- if (mExternalIsEmulated && path.startsWith(mExternalStoragePath)) {
- // try to rewrite the path to bypass the sd card fuse layer
- String directPath = Environment.getMediaStorageDirectory() +
- path.substring(mExternalStoragePath.length());
- File f = new File(directPath);
- if (f.exists()) {
- path = directPath;
- }
- }
+ path = Environment.maybeTranslateEmulatedPathToInternal(new File(path))
+ .getAbsolutePath();
}
// we only extract metadata for audio and video files
diff --git a/media/java/android/media/MediaSync.java b/media/java/android/media/MediaSync.java
new file mode 100644
index 0000000..b07931d
--- /dev/null
+++ b/media/java/android/media/MediaSync.java
@@ -0,0 +1,617 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.media.AudioTrack;
+import android.media.PlaybackParams;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * MediaSync class can be used to synchronously play back audio and video streams.
+ * It can be used to play audio-only or video-only stream, too.
+ *
+ * <p>MediaSync is generally used like this:
+ * <pre>
+ * MediaSync sync = new MediaSync();
+ * sync.setSurface(surface);
+ * Surface inputSurface = sync.createInputSurface();
+ * ...
+ * // MediaCodec videoDecoder = ...;
+ * videoDecoder.configure(format, inputSurface, ...);
+ * ...
+ * sync.setAudioTrack(audioTrack);
+ * sync.setCallback(new MediaSync.Callback() {
+ * {@literal @Override}
+ * public void onAudioBufferConsumed(MediaSync sync, ByteBuffer audioBuffer, int bufferId) {
+ * ...
+ * }
+ * }, null);
+ * // This needs to be done since sync is paused on creation.
+ * sync.setPlaybackRate(1.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ *
+ * for (;;) {
+ * ...
+ * // send video frames to surface for rendering, e.g., call
+ * // videoDecoder.releaseOutputBuffer(videoOutputBufferIx, videoPresentationTimeNs);
+ * // More details are available as below.
+ * ...
+ * sync.queueAudio(audioByteBuffer, bufferId, audioPresentationTimeUs); // non-blocking.
+ * // The audioByteBuffer and bufferId will be returned via callback.
+ * // More details are available as below.
+ * ...
+ * ...
+ * }
+ * sync.setPlaybackRate(0.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ * sync.release();
+ * sync = null;
+ *
+ * // The following code snippet illustrates how video/audio raw frames are created by
+ * // MediaCodec's, how they are fed to MediaSync and how they are returned by MediaSync.
+ * // This is the callback from MediaCodec.
+ * onOutputBufferAvailable(MediaCodec codec, int bufferId, BufferInfo info) {
+ * // ...
+ * if (codec == videoDecoder) {
+ * // surface timestamp must contain media presentation time in nanoseconds.
+ *       codec.releaseOutputBuffer(bufferId, 1000 * info.presentationTimeUs);
+ * } else {
+ * ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferId);
+ *       sync.queueAudio(audioByteBuffer, bufferId, info.presentationTimeUs);
+ * }
+ * // ...
+ * }
+ *
+ * // This is the callback from MediaSync.
+ * onAudioBufferConsumed(MediaSync sync, ByteBuffer buffer, int bufferId) {
+ * // ...
+ *     audioDecoder.releaseOutputBuffer(bufferId, false);
+ * // ...
+ * }
+ *
+ * </pre>
+ *
+ * The client needs to configure corresponding sink by setting the Surface and/or AudioTrack
+ * based on the stream type it will play.
+ * <p>
+ * For video, the client needs to call {@link #createInputSurface} to obtain a surface on
+ * which it will render video frames.
+ * <p>
+ * For audio, the client needs to set up audio track correctly, e.g., using {@link
+ * AudioTrack#MODE_STREAM}. The audio buffers are sent to MediaSync directly via {@link
+ * #queueAudio}, and are returned to the client via {@link Callback#onAudioBufferConsumed}
+ * asynchronously. The client should not modify an audio buffer till it's returned.
+ * <p>
+ * The client can optionally pre-fill audio/video buffers by setting playback rate to 0.0,
+ * and then feed audio/video buffers to corresponding components. This can reduce possible
+ * initial underrun.
+ * <p>
+ */
+public final class MediaSync {
+ /**
+ * MediaSync callback interface. Used to notify the user asynchronously
+ * of various MediaSync events.
+ */
+ public static abstract class Callback {
+ /**
+ * Called when returning an audio buffer which has been consumed.
+ *
+ * @param sync The MediaSync object.
+ * @param audioBuffer The returned audio buffer.
+ * @param bufferId The ID associated with audioBuffer as passed into
+ * {@link MediaSync#queueAudio}.
+ */
+ public abstract void onAudioBufferConsumed(
+ @NonNull MediaSync sync, @NonNull ByteBuffer audioBuffer, int bufferId);
+ }
+
+ /** Audio track failed.
+ * @see android.media.MediaSync.OnErrorListener
+ */
+ public static final int MEDIASYNC_ERROR_AUDIOTRACK_FAIL = 1;
+
+ /** The surface failed to handle video buffers.
+ * @see android.media.MediaSync.OnErrorListener
+ */
+ public static final int MEDIASYNC_ERROR_SURFACE_FAIL = 2;
+
+ /**
+ * Interface definition of a callback to be invoked when there
+ * has been an error during an asynchronous operation (other errors
+ * will throw exceptions at method call time).
+ */
+ public interface OnErrorListener {
+ /**
+ * Called to indicate an error.
+ *
+ * @param sync The MediaSync the error pertains to
+ * @param what The type of error that has occurred:
+ * <ul>
+ * <li>{@link #MEDIASYNC_ERROR_AUDIOTRACK_FAIL}
+ * <li>{@link #MEDIASYNC_ERROR_SURFACE_FAIL}
+ * </ul>
+ * @param extra an extra code, specific to the error. Typically
+ * implementation dependent.
+ */
+ void onError(@NonNull MediaSync sync, int what, int extra);
+ }
+
+ private static final String TAG = "MediaSync";
+
+ private static final int EVENT_CALLBACK = 1;
+ private static final int EVENT_SET_CALLBACK = 2;
+
+ private static final int CB_RETURN_AUDIO_BUFFER = 1;
+
+ private static class AudioBuffer {
+ public ByteBuffer mByteBuffer;
+ public int mBufferIndex;
+ long mPresentationTimeUs;
+
+ public AudioBuffer(@NonNull ByteBuffer byteBuffer, int bufferId,
+ long presentationTimeUs) {
+ mByteBuffer = byteBuffer;
+ mBufferIndex = bufferId;
+ mPresentationTimeUs = presentationTimeUs;
+ }
+ }
+
+ private final Object mCallbackLock = new Object();
+ private Handler mCallbackHandler = null;
+ private MediaSync.Callback mCallback = null;
+
+ private final Object mOnErrorListenerLock = new Object();
+ private Handler mOnErrorListenerHandler = null;
+ private MediaSync.OnErrorListener mOnErrorListener = null;
+
+ private Thread mAudioThread = null;
+ // Created on mAudioThread when mAudioThread is started. When used on user thread, they should
+ // be guarded by checking mAudioThread.
+ private Handler mAudioHandler = null;
+ private Looper mAudioLooper = null;
+
+ private final Object mAudioLock = new Object();
+ private AudioTrack mAudioTrack = null;
+ private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>();
+ // this is only used for paused/running decisions, so it is not affected by clock drift
+ private float mPlaybackRate = 0.0f;
+
+ private long mNativeContext;
+
+ /**
+ * Class constructor. On creation, MediaSync is paused, i.e., playback rate is 0.0f.
+ */
+ public MediaSync() {
+ native_setup();
+ }
+
+ private native final void native_setup();
+
+ @Override
+ protected void finalize() {
+ native_finalize();
+ }
+
+ private native final void native_finalize();
+
+ /**
+ * Make sure you call this when you're done to free up any opened
+ * component instance instead of relying on the garbage collector
+ * to do this for you at some point in the future.
+ */
+ public final void release() {
+ returnAudioBuffers();
+ if (mAudioThread != null) {
+ if (mAudioLooper != null) {
+ mAudioLooper.quit();
+ }
+ }
+ setCallback(null, null);
+ native_release();
+ }
+
+ private native final void native_release();
+
+ /**
+ * Sets an asynchronous callback for actionable MediaSync events.
+ * <p>
+ * This method can be called multiple times to update a previously set callback. If the
+ * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
+ * <p>
+ * <b>Do not call this inside callback.</b>
+ *
+ * @param cb The callback that will run. Use {@code null} to stop receiving callbacks.
+ * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
+ * internal handler if it exists.
+ */
+ public void setCallback(@Nullable /* MediaSync. */ Callback cb, @Nullable Handler handler) {
+ synchronized(mCallbackLock) {
+ if (handler != null) {
+ mCallbackHandler = handler;
+ } else {
+ Looper looper;
+ if ((looper = Looper.myLooper()) == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper == null) {
+ mCallbackHandler = null;
+ } else {
+ mCallbackHandler = new Handler(looper);
+ }
+ }
+
+ mCallback = cb;
+ }
+ }
+
+ /**
+ * Sets an asynchronous callback for error events.
+ * <p>
+ * This method can be called multiple times to update a previously set listener. If the
+ * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
+ * <p>
+ * <b>Do not call this inside callback.</b>
+ *
+ * @param listener The callback that will run. Use {@code null} to stop receiving callbacks.
+ * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
+ * internal handler if it exists.
+ */
+ public void setOnErrorListener(@Nullable /* MediaSync. */ OnErrorListener listener,
+ @Nullable Handler handler) {
+ synchronized(mOnErrorListenerLock) {
+ if (handler != null) {
+ mOnErrorListenerHandler = handler;
+ } else {
+ Looper looper;
+ if ((looper = Looper.myLooper()) == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper == null) {
+ mOnErrorListenerHandler = null;
+ } else {
+ mOnErrorListenerHandler = new Handler(looper);
+ }
+ }
+
+ mOnErrorListener = listener;
+ }
+ }
+
+ /**
+ * Sets the output surface for MediaSync.
+ * <p>
+ * Currently, this is only supported in the Initialized state.
+ *
+ * @param surface Specify a surface on which to render the video data.
+ * @throws IllegalArgumentException if the surface has been released, is invalid,
+ * or can not be connected.
+ * @throws IllegalStateException if setting the surface is not supported, e.g.
+ * not in the Initialized state, or another surface has already been set.
+ */
+ public void setSurface(@Nullable Surface surface) {
+ native_setSurface(surface);
+ }
+
+ private native final void native_setSurface(@Nullable Surface surface);
+
+ /**
+ * Sets the audio track for MediaSync.
+ * <p>
+ * Currently, this is only supported in the Initialized state.
+ *
+ * @param audioTrack Specify an AudioTrack through which to render the audio data.
+ * @throws IllegalArgumentException if the audioTrack has been released, or is invalid.
+ * @throws IllegalStateException if setting the audio track is not supported, e.g.
+ * not in the Initialized state, or another audio track has already been set.
+ */
+ public void setAudioTrack(@Nullable AudioTrack audioTrack) {
+ native_setAudioTrack(audioTrack);
+ mAudioTrack = audioTrack;
+ if (audioTrack != null && mAudioThread == null) {
+ createAudioThread();
+ }
+ }
+
+ private native final void native_setAudioTrack(@Nullable AudioTrack audioTrack);
+
+ /**
+ * Requests a Surface to use as the input. This may only be called after
+ * {@link #setSurface}.
+ * <p>
+ * The application is responsible for calling release() on the Surface when
+ * done.
+ * @throws IllegalStateException if not set, or another input surface has
+ * already been created.
+ */
+ @NonNull
+ public native final Surface createInputSurface();
+
+ /**
+ * Sets playback rate using {@link PlaybackParams}.
+ * <p>
+ * When using MediaSync with {@link AudioTrack}, set playback params using this
+ * call instead of calling it directly on the track, so that the sync is aware of
+ * the params change.
+ * <p>
+ * This call also works if there is no audio track.
+ *
+ * @param params the playback params to use. {@link PlaybackParams#getSpeed
+ * Speed} is the ratio between desired playback rate and normal one. 1.0 means
+ * normal playback speed. 0.0 means pause. Value larger than 1.0 means faster playback,
+ * while value between 0.0 and 1.0 for slower playback. <b>Note:</b> the normal rate
+ * does not change as a result of this call. To restore the original rate at any time,
+ * use speed of 1.0.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ * @throws IllegalArgumentException if the params are not supported.
+ */
+ public void setPlaybackParams(@NonNull PlaybackParams params) {
+ synchronized(mAudioLock) {
+            mPlaybackRate = native_setPlaybackParams(params);
+ }
+ if (mPlaybackRate != 0.0 && mAudioThread != null) {
+ postRenderAudio(0);
+ }
+ }
+
+ /**
+ * Gets the playback rate using {@link PlaybackParams}.
+ *
+ * @return the playback rate being used.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ */
+ @NonNull
+ public native PlaybackParams getPlaybackParams();
+
+ private native float native_setPlaybackParams(@NonNull PlaybackParams params);
+
+ /**
+ * Sets A/V sync mode.
+ *
+ * @param params the A/V sync params to apply
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if params are not supported.
+ */
+ public void setSyncParams(@NonNull SyncParams params) {
+ synchronized(mAudioLock) {
+            mPlaybackRate = native_setSyncParams(params);
+ }
+ if (mPlaybackRate != 0.0 && mAudioThread != null) {
+ postRenderAudio(0);
+ }
+ }
+
+ private native float native_setSyncParams(@NonNull SyncParams params);
+
+ /**
+ * Gets the A/V sync mode.
+ *
+ * @return the A/V sync params
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native SyncParams getSyncParams();
+
+ /**
+ * Flushes all buffers from the sync object.
+ * <p>
+ * No callbacks are received for the flushed buffers.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ public void flush() {
+ synchronized(mAudioLock) {
+ mAudioBuffers.clear();
+ mCallbackHandler.removeCallbacksAndMessages(null);
+ }
+ // TODO implement this for surface buffers.
+ }
+
+ /**
+ * Get current playback position.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion using an anchor and a clock rate. During regular playback, the media
+ * time moves fairly constantly (though the anchor frame may be rebased to a current
+ * system time, the linear correlation stays steady). Therefore, this method does not
+ * need to be called often.
+ * <p>
+ * To help users get current playback position, this method always anchors the timestamp
+ * to the current {@link System#nanoTime system time}, so
+ * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media player has not been initialized.
+ *
+ * @see MediaTimestamp
+ */
+ @Nullable
+ public MediaTimestamp getTimestamp()
+ {
+ try {
+ // TODO: create the timestamp in native
+ MediaTimestamp timestamp = new MediaTimestamp();
+ if (native_getTimestamp(timestamp)) {
+ return timestamp;
+ } else {
+ return null;
+ }
+ } catch (IllegalStateException e) {
+ return null;
+ }
+ }
+
+ private native final boolean native_getTimestamp(@NonNull MediaTimestamp timestamp);
+
+ /**
+ * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode).
+ * @param audioData the buffer that holds the data to play. This buffer will be returned
+ * to the client via registered callback.
+ * @param bufferId an integer used to identify audioData. It will be returned to
+ * the client along with audioData. This helps applications to keep track of audioData,
+ * e.g., it can be used to store the output buffer index used by the audio codec.
+ * @param presentationTimeUs the presentation timestamp in microseconds for the first frame
+ * in the buffer.
+     * @throws IllegalStateException if audio track is not set or internal configuration
+ * has not been done correctly.
+ */
+ public void queueAudio(
+ @NonNull ByteBuffer audioData, int bufferId, long presentationTimeUs) {
+ if (mAudioTrack == null || mAudioThread == null) {
+ throw new IllegalStateException(
+ "AudioTrack is NOT set or audio thread is not created");
+ }
+
+ synchronized(mAudioLock) {
+ mAudioBuffers.add(new AudioBuffer(audioData, bufferId, presentationTimeUs));
+ }
+
+ if (mPlaybackRate != 0.0) {
+ postRenderAudio(0);
+ }
+ }
+
+ // When called on user thread, make sure to check mAudioThread != null.
+ private void postRenderAudio(long delayMillis) {
+ mAudioHandler.postDelayed(new Runnable() {
+ public void run() {
+ synchronized(mAudioLock) {
+ if (mPlaybackRate == 0.0) {
+ return;
+ }
+
+ if (mAudioBuffers.isEmpty()) {
+ return;
+ }
+
+ AudioBuffer audioBuffer = mAudioBuffers.get(0);
+ int size = audioBuffer.mByteBuffer.remaining();
+ int sizeWritten = mAudioTrack.write(
+ audioBuffer.mByteBuffer,
+ size,
+ AudioTrack.WRITE_NON_BLOCKING);
+ if (sizeWritten > 0) {
+ if (audioBuffer.mPresentationTimeUs != -1) {
+ native_updateQueuedAudioData(
+ size, audioBuffer.mPresentationTimeUs);
+ audioBuffer.mPresentationTimeUs = -1;
+ }
+
+ if (sizeWritten == size) {
+ postReturnByteBuffer(audioBuffer);
+ mAudioBuffers.remove(0);
+ if (!mAudioBuffers.isEmpty()) {
+ postRenderAudio(0);
+ }
+ return;
+ }
+ }
+ long pendingTimeMs = TimeUnit.MICROSECONDS.toMillis(
+ native_getPlayTimeForPendingAudioFrames());
+ postRenderAudio(pendingTimeMs / 2);
+ }
+ }
+ }, delayMillis);
+ }
+
+ private native final void native_updateQueuedAudioData(
+ int sizeInBytes, long presentationTimeUs);
+
+ private native final long native_getPlayTimeForPendingAudioFrames();
+
+ private final void postReturnByteBuffer(@NonNull final AudioBuffer audioBuffer) {
+ synchronized(mCallbackLock) {
+ if (mCallbackHandler != null) {
+ final MediaSync sync = this;
+ mCallbackHandler.post(new Runnable() {
+ public void run() {
+ synchronized(mCallbackLock) {
+ if (mCallbackHandler == null
+ || mCallbackHandler.getLooper().getThread()
+ != Thread.currentThread()) {
+ // callback handler has been changed.
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onAudioBufferConsumed(sync, audioBuffer.mByteBuffer,
+ audioBuffer.mBufferIndex);
+ }
+ }
+ }
+ });
+ }
+ }
+ }
+
+ private final void returnAudioBuffers() {
+ synchronized(mAudioLock) {
+ for (AudioBuffer audioBuffer: mAudioBuffers) {
+ postReturnByteBuffer(audioBuffer);
+ }
+ mAudioBuffers.clear();
+ }
+ }
+
+ private void createAudioThread() {
+ mAudioThread = new Thread() {
+ @Override
+ public void run() {
+ Looper.prepare();
+ synchronized(mAudioLock) {
+ mAudioLooper = Looper.myLooper();
+ mAudioHandler = new Handler();
+ mAudioLock.notify();
+ }
+ Looper.loop();
+ }
+ };
+ mAudioThread.start();
+
+ synchronized(mAudioLock) {
+ try {
+ mAudioLock.wait();
+ } catch(InterruptedException e) {
+ }
+ }
+ }
+
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ private static native final void native_init();
+}
diff --git a/media/java/android/media/MediaTimestamp.java b/media/java/android/media/MediaTimestamp.java
new file mode 100644
index 0000000..5ea6bbe
--- /dev/null
+++ b/media/java/android/media/MediaTimestamp.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An immutable object that represents the linear correlation between the media time
+ * and the system time. It contains the media clock rate, together with the media timestamp
+ * of an anchor frame and the system time when that frame was presented or is committed
+ * to be presented.
+ * <p>
+ * The phrase "present" means that audio/video produced on device is detectable by an external
+ * observer off device.
+ * The time is based on the implementation's best effort, using whatever knowledge
+ * is available to the system, but cannot account for any delay unknown to the implementation.
+ * The anchor frame could be any frame, including a just-rendered frame, or even a theoretical
+ * or in-between frame, based on the source of the MediaTimestamp.
+ * When the anchor frame is a just-rendered one, the media time stands for
+ * current position of the playback or recording.
+ *
+ * @see MediaSync#getTimestamp
+ * @see MediaPlayer#getTimestamp
+ */
+public final class MediaTimestamp
+{
+ /**
+ * Get the media time of the anchor in microseconds.
+ */
+ public long getAnchorMediaTimeUs() {
+ return mediaTimeUs;
+ }
+
+ /**
+ * Get the {@link java.lang.System#nanoTime system time} corresponding to the media time
+ * in nanoseconds.
+ */
+ public long getAnchorSytemNanoTime() {
+ return nanoTime;
+ }
+
+ /**
+ * Get the rate of the media clock in relation to the system time.
+ * <p>
+ * It is 1.0 if media clock advances in sync with the system clock;
+ * greater than 1.0 if media clock is faster than the system clock;
+ * less than 1.0 if media clock is slower than the system clock.
+ */
+ public float getMediaClockRate() {
+ return clockRate;
+ }
+
+ /** @hide - accessor shorthand */
+ public final long mediaTimeUs;
+ /** @hide - accessor shorthand */
+ public final long nanoTime;
+ /** @hide - accessor shorthand */
+ public final float clockRate;
+
+ /** @hide */
+ MediaTimestamp(long mediaUs, long systemNs, float rate) {
+ mediaTimeUs = mediaUs;
+ nanoTime = systemNs;
+ clockRate = rate;
+ }
+
+ /** @hide */
+ MediaTimestamp() {
+ mediaTimeUs = 0;
+ nanoTime = 0;
+ clockRate = 1.0f;
+ }
+}
diff --git a/media/java/android/media/PlaybackParams.aidl b/media/java/android/media/PlaybackParams.aidl
new file mode 100644
index 0000000..0356117
--- /dev/null
+++ b/media/java/android/media/PlaybackParams.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+parcelable PlaybackParams;
diff --git a/media/java/android/media/PlaybackParams.java b/media/java/android/media/PlaybackParams.java
new file mode 100644
index 0000000..021dbf2
--- /dev/null
+++ b/media/java/android/media/PlaybackParams.java
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Structure for common playback params.
+ *
+ * Used by {@link AudioTrack} {@link AudioTrack#getPlaybackParams()} and
+ * {@link AudioTrack#setPlaybackParams(PlaybackParams)}
+ * to control playback behavior.
+ * <p> <strong>audio fallback mode:</strong>
+ * select out-of-range parameter handling.
+ * <ul>
+ * <li> {@link PlaybackParams#AUDIO_FALLBACK_MODE_DEFAULT}:
+ * System will determine best handling. </li>
+ * <li> {@link PlaybackParams#AUDIO_FALLBACK_MODE_MUTE}:
+ * Play silence for params normally out of range.</li>
+ * <li> {@link PlaybackParams#AUDIO_FALLBACK_MODE_FAIL}:
+ * Throw {@link java.lang.IllegalArgumentException} from
+ * <code>AudioTrack.setPlaybackParams(PlaybackParams)</code>.</li>
+ * </ul>
+ * <p> <strong>pitch:</strong> increases or decreases the tonal frequency of the audio content.
+ * It is expressed as a multiplicative factor, where normal pitch is 1.0f.
+ * <p> <strong>speed:</strong> increases or decreases the time to
+ * play back a set of audio or video frames.
+ * It is expressed as a multiplicative factor, where normal speed is 1.0f.
+ * <p> Different combinations of speed and pitch may be used for audio playback;
+ * some common ones:
+ * <ul>
+ * <li> <em>Pitch equals 1.0f.</em> Speed change will be done with pitch preserved,
+ * often called <em>timestretching</em>.</li>
+ * <li> <em>Pitch equals speed.</em> Speed change will be done by <em>resampling</em>,
+ * similar to {@link AudioTrack#setPlaybackRate(int)}.</li>
+ * </ul>
+ */
+public final class PlaybackParams implements Parcelable {
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_FALLBACK_MODE_DEFAULT,
+ AUDIO_FALLBACK_MODE_MUTE,
+ AUDIO_FALLBACK_MODE_FAIL,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioFallbackMode {}
+ public static final int AUDIO_FALLBACK_MODE_DEFAULT = 0;
+ public static final int AUDIO_FALLBACK_MODE_MUTE = 1;
+ public static final int AUDIO_FALLBACK_MODE_FAIL = 2;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_STRETCH_MODE_DEFAULT,
+ AUDIO_STRETCH_MODE_VOICE,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioStretchMode {}
+ /** @hide */
+ public static final int AUDIO_STRETCH_MODE_DEFAULT = 0;
+ /** @hide */
+ public static final int AUDIO_STRETCH_MODE_VOICE = 1;
+
+ // flags to indicate which params are actually set
+ private static final int SET_SPEED = 1 << 0;
+ private static final int SET_PITCH = 1 << 1;
+ private static final int SET_AUDIO_FALLBACK_MODE = 1 << 2;
+ private static final int SET_AUDIO_STRETCH_MODE = 1 << 3;
+ private int mSet = 0;
+
+ // params
+ private int mAudioFallbackMode = AUDIO_FALLBACK_MODE_DEFAULT;
+ private int mAudioStretchMode = AUDIO_STRETCH_MODE_DEFAULT;
+ private float mPitch = 1.0f;
+ private float mSpeed = 1.0f;
+
+ public PlaybackParams() {
+ }
+
+ private PlaybackParams(Parcel in) {
+ mSet = in.readInt();
+ mAudioFallbackMode = in.readInt();
+ mAudioStretchMode = in.readInt();
+ mPitch = in.readFloat();
+ if (mPitch < 0.f) {
+ mPitch = 0.f;
+ }
+ mSpeed = in.readFloat();
+ }
+
+ /**
+ * Allows defaults to be returned for properties not set.
+ * Otherwise a {@link java.lang.IllegalStateException} exception
+ * is raised when getting those properties
+ * which have defaults but have never been set.
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams allowDefaults() {
+ mSet |= SET_AUDIO_FALLBACK_MODE | SET_AUDIO_STRETCH_MODE | SET_PITCH | SET_SPEED;
+ return this;
+ }
+
+ /**
+ * Sets the audio fallback mode.
+ * @param audioFallbackMode
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams setAudioFallbackMode(@AudioFallbackMode int audioFallbackMode) {
+ mAudioFallbackMode = audioFallbackMode;
+ mSet |= SET_AUDIO_FALLBACK_MODE;
+ return this;
+ }
+
+ /**
+ * Retrieves the audio fallback mode.
+ * @return audio fallback mode
+ * @throws IllegalStateException if the audio fallback mode is not set.
+ */
+ public @AudioFallbackMode int getAudioFallbackMode() {
+ if ((mSet & SET_AUDIO_FALLBACK_MODE) == 0) {
+ throw new IllegalStateException("audio fallback mode not set");
+ }
+ return mAudioFallbackMode;
+ }
+
+ /**
+ * @hide
+ * Sets the audio stretch mode.
+ * @param audioStretchMode
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams setAudioStretchMode(@AudioStretchMode int audioStretchMode) {
+ mAudioStretchMode = audioStretchMode;
+ mSet |= SET_AUDIO_STRETCH_MODE;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Retrieves the audio stretch mode.
+ * @return audio stretch mode
+ * @throws IllegalStateException if the audio stretch mode is not set.
+ */
+ public @AudioStretchMode int getAudioStretchMode() {
+ if ((mSet & SET_AUDIO_STRETCH_MODE) == 0) {
+ throw new IllegalStateException("audio stretch mode not set");
+ }
+ return mAudioStretchMode;
+ }
+
+ /**
+ * Sets the pitch factor.
+ * @param pitch
+ * @return this <code>PlaybackParams</code> instance.
+ * @throws IllegalArgumentException if the pitch is negative
+ */
+ public PlaybackParams setPitch(float pitch) {
+ if (pitch < 0.f) {
+ throw new IllegalArgumentException("pitch must not be negative");
+ }
+ mPitch = pitch;
+ mSet |= SET_PITCH;
+ return this;
+ }
+
+ /**
+ * Retrieves the pitch factor.
+ * @return pitch
+ * @throws IllegalStateException if pitch is not set.
+ */
+ public float getPitch() {
+ if ((mSet & SET_PITCH) == 0) {
+ throw new IllegalStateException("pitch not set");
+ }
+ return mPitch;
+ }
+
+ /**
+ * Sets the speed factor.
+ * @param speed
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams setSpeed(float speed) {
+ mSpeed = speed;
+ mSet |= SET_SPEED;
+ return this;
+ }
+
+ /**
+ * Retrieves the speed factor.
+ * @return speed
+ * @throws IllegalStateException if speed is not set.
+ */
+ public float getSpeed() {
+ if ((mSet & SET_SPEED) == 0) {
+ throw new IllegalStateException("speed not set");
+ }
+ return mSpeed;
+ }
+
+ public static final Parcelable.Creator<PlaybackParams> CREATOR =
+ new Parcelable.Creator<PlaybackParams>() {
+ @Override
+ public PlaybackParams createFromParcel(Parcel in) {
+ return new PlaybackParams(in);
+ }
+
+ @Override
+ public PlaybackParams[] newArray(int size) {
+ return new PlaybackParams[size];
+ }
+ };
+
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mSet);
+ dest.writeInt(mAudioFallbackMode);
+ dest.writeInt(mAudioStretchMode);
+ dest.writeFloat(mPitch);
+ dest.writeFloat(mSpeed);
+ }
+}
diff --git a/media/java/android/media/PlayerRecord.java b/media/java/android/media/PlayerRecord.java
deleted file mode 100644
index 664ddcf..0000000
--- a/media/java/android/media/PlayerRecord.java
+++ /dev/null
@@ -1,357 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.app.PendingIntent;
-import android.content.ComponentName;
-import android.os.Binder;
-import android.os.IBinder;
-import android.os.IBinder.DeathRecipient;
-import android.os.RemoteException;
-import android.util.Log;
-
-import java.io.PrintWriter;
-
-/**
- * @hide
- * Class to handle all the information about a media player, encapsulating information
- * about its use RemoteControlClient, playback type and volume... The lifecycle of each
- * instance is managed by android.media.MediaFocusControl, from its addition to the player stack
- * stack to its release.
- */
-class PlayerRecord implements DeathRecipient {
-
- // on purpose not using this classe's name, as it will only be used from MediaFocusControl
- private static final String TAG = "MediaFocusControl";
- private static final boolean DEBUG = false;
-
- /**
- * A global counter for RemoteControlClient identifiers
- */
- private static int sLastRccId = 0;
-
- public static MediaFocusControl sController;
-
- /**
- * The target for the ACTION_MEDIA_BUTTON events.
- * Always non null. //FIXME verify
- */
- final private PendingIntent mMediaIntent;
- /**
- * The registered media button event receiver.
- */
- final private ComponentName mReceiverComponent;
-
- private int mRccId = -1;
-
- /**
- * A non-null token implies this record tracks a "live" player whose death is being monitored.
- */
- private IBinder mToken;
- private String mCallingPackageName;
- private int mCallingUid;
- /**
- * Provides access to the information to display on the remote control.
- * May be null (when a media button event receiver is registered,
- * but no remote control client has been registered) */
- private IRemoteControlClient mRcClient;
- private RcClientDeathHandler mRcClientDeathHandler;
- /**
- * Information only used for non-local playback
- */
- //FIXME private?
- public int mPlaybackType;
- public int mPlaybackVolume;
- public int mPlaybackVolumeMax;
- public int mPlaybackVolumeHandling;
- public int mPlaybackStream;
- public RccPlaybackState mPlaybackState;
- public IRemoteVolumeObserver mRemoteVolumeObs;
-
-
- protected static class RccPlaybackState {
- public int mState;
- public long mPositionMs;
- public float mSpeed;
-
- public RccPlaybackState(int state, long positionMs, float speed) {
- mState = state;
- mPositionMs = positionMs;
- mSpeed = speed;
- }
-
- public void reset() {
- mState = RemoteControlClient.PLAYSTATE_STOPPED;
- mPositionMs = RemoteControlClient.PLAYBACK_POSITION_INVALID;
- mSpeed = RemoteControlClient.PLAYBACK_SPEED_1X;
- }
-
- @Override
- public String toString() {
- return stateToString() + ", " + posToString() + ", " + mSpeed + "X";
- }
-
- private String posToString() {
- if (mPositionMs == RemoteControlClient.PLAYBACK_POSITION_INVALID) {
- return "PLAYBACK_POSITION_INVALID";
- } else if (mPositionMs == RemoteControlClient.PLAYBACK_POSITION_ALWAYS_UNKNOWN) {
- return "PLAYBACK_POSITION_ALWAYS_UNKNOWN";
- } else {
- return (String.valueOf(mPositionMs) + "ms");
- }
- }
-
- private String stateToString() {
- switch (mState) {
- case RemoteControlClient.PLAYSTATE_NONE:
- return "PLAYSTATE_NONE";
- case RemoteControlClient.PLAYSTATE_STOPPED:
- return "PLAYSTATE_STOPPED";
- case RemoteControlClient.PLAYSTATE_PAUSED:
- return "PLAYSTATE_PAUSED";
- case RemoteControlClient.PLAYSTATE_PLAYING:
- return "PLAYSTATE_PLAYING";
- case RemoteControlClient.PLAYSTATE_FAST_FORWARDING:
- return "PLAYSTATE_FAST_FORWARDING";
- case RemoteControlClient.PLAYSTATE_REWINDING:
- return "PLAYSTATE_REWINDING";
- case RemoteControlClient.PLAYSTATE_SKIPPING_FORWARDS:
- return "PLAYSTATE_SKIPPING_FORWARDS";
- case RemoteControlClient.PLAYSTATE_SKIPPING_BACKWARDS:
- return "PLAYSTATE_SKIPPING_BACKWARDS";
- case RemoteControlClient.PLAYSTATE_BUFFERING:
- return "PLAYSTATE_BUFFERING";
- case RemoteControlClient.PLAYSTATE_ERROR:
- return "PLAYSTATE_ERROR";
- default:
- return "[invalid playstate]";
- }
- }
- }
-
-
- /**
- * Inner class to monitor remote control client deaths, and remove the client for the
- * remote control stack if necessary.
- */
- private class RcClientDeathHandler implements IBinder.DeathRecipient {
- final private IBinder mCb; // To be notified of client's death
- //FIXME needed?
- final private PendingIntent mMediaIntent;
-
- RcClientDeathHandler(IBinder cb, PendingIntent pi) {
- mCb = cb;
- mMediaIntent = pi;
- }
-
- public void binderDied() {
- Log.w(TAG, " RemoteControlClient died");
- // remote control client died, make sure the displays don't use it anymore
- // by setting its remote control client to null
- sController.registerRemoteControlClient(mMediaIntent, null/*rcClient*/, null/*ignored*/);
- // the dead client was maybe handling remote playback, the controller should reevaluate
- sController.postReevaluateRemote();
- }
-
- public IBinder getBinder() {
- return mCb;
- }
- }
-
-
- protected static class RemotePlaybackState {
- int mRccId;
- int mVolume;
- int mVolumeMax;
- int mVolumeHandling;
-
- protected RemotePlaybackState(int id, int vol, int volMax) {
- mRccId = id;
- mVolume = vol;
- mVolumeMax = volMax;
- mVolumeHandling = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME_HANDLING;
- }
- }
-
-
- void dump(PrintWriter pw, boolean registrationInfo) {
- if (registrationInfo) {
- pw.println(" pi: " + mMediaIntent +
- " -- pack: " + mCallingPackageName +
- " -- ercvr: " + mReceiverComponent +
- " -- client: " + mRcClient +
- " -- uid: " + mCallingUid +
- " -- type: " + mPlaybackType +
- " state: " + mPlaybackState);
- } else {
- // emphasis on state
- pw.println(" uid: " + mCallingUid +
- " -- id: " + mRccId +
- " -- type: " + mPlaybackType +
- " -- state: " + mPlaybackState +
- " -- vol handling: " + mPlaybackVolumeHandling +
- " -- vol: " + mPlaybackVolume +
- " -- volMax: " + mPlaybackVolumeMax +
- " -- volObs: " + mRemoteVolumeObs);
- }
- }
-
-
- static protected void setMediaFocusControl(MediaFocusControl mfc) {
- sController = mfc;
- }
-
- /** precondition: mediaIntent != null */
- protected PlayerRecord(PendingIntent mediaIntent, ComponentName eventReceiver, IBinder token)
- {
- mMediaIntent = mediaIntent;
- mReceiverComponent = eventReceiver;
- mToken = token;
- mCallingUid = -1;
- mRcClient = null;
- mRccId = ++sLastRccId;
- mPlaybackState = new RccPlaybackState(
- RemoteControlClient.PLAYSTATE_STOPPED,
- RemoteControlClient.PLAYBACK_POSITION_INVALID,
- RemoteControlClient.PLAYBACK_SPEED_1X);
-
- resetPlaybackInfo();
- if (mToken != null) {
- try {
- mToken.linkToDeath(this, 0);
- } catch (RemoteException e) {
- sController.unregisterMediaButtonIntentAsync(mMediaIntent);
- }
- }
- }
-
- //---------------------------------------------
- // Accessors
- protected int getRccId() {
- return mRccId;
- }
-
- protected IRemoteControlClient getRcc() {
- return mRcClient;
- }
-
- protected ComponentName getMediaButtonReceiver() {
- return mReceiverComponent;
- }
-
- protected PendingIntent getMediaButtonIntent() {
- return mMediaIntent;
- }
-
- protected boolean hasMatchingMediaButtonIntent(PendingIntent pi) {
- if (mToken != null) {
- return mMediaIntent.equals(pi);
- } else {
- if (mReceiverComponent != null) {
- return mReceiverComponent.equals(pi.getIntent().getComponent());
- } else {
- return false;
- }
- }
- }
-
- protected boolean isPlaybackActive() {
- return MediaFocusControl.isPlaystateActive(mPlaybackState.mState);
- }
-
- //---------------------------------------------
- // Modify the records stored in the instance
- protected void resetControllerInfoForRcc(IRemoteControlClient rcClient,
- String callingPackageName, int uid) {
- // already had a remote control client?
- if (mRcClientDeathHandler != null) {
- // stop monitoring the old client's death
- unlinkToRcClientDeath();
- }
- // save the new remote control client
- mRcClient = rcClient;
- mCallingPackageName = callingPackageName;
- mCallingUid = uid;
- if (rcClient == null) {
- // here mcse.mRcClientDeathHandler is null;
- resetPlaybackInfo();
- } else {
- IBinder b = mRcClient.asBinder();
- RcClientDeathHandler rcdh =
- new RcClientDeathHandler(b, mMediaIntent);
- try {
- b.linkToDeath(rcdh, 0);
- } catch (RemoteException e) {
- // remote control client is DOA, disqualify it
- Log.w(TAG, "registerRemoteControlClient() has a dead client " + b);
- mRcClient = null;
- }
- mRcClientDeathHandler = rcdh;
- }
- }
-
- protected void resetControllerInfoForNoRcc() {
- // stop monitoring the RCC death
- unlinkToRcClientDeath();
- // reset the RCC-related fields
- mRcClient = null;
- mCallingPackageName = null;
- }
-
- public void resetPlaybackInfo() {
- mPlaybackType = RemoteControlClient.PLAYBACK_TYPE_LOCAL;
- mPlaybackVolume = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME;
- mPlaybackVolumeMax = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME;
- mPlaybackVolumeHandling = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME_HANDLING;
- mPlaybackStream = AudioManager.STREAM_MUSIC;
- mPlaybackState.reset();
- mRemoteVolumeObs = null;
- }
-
- //---------------------------------------------
- public void unlinkToRcClientDeath() {
- if ((mRcClientDeathHandler != null) && (mRcClientDeathHandler.mCb != null)) {
- try {
- mRcClientDeathHandler.mCb.unlinkToDeath(mRcClientDeathHandler, 0);
- mRcClientDeathHandler = null;
- } catch (java.util.NoSuchElementException e) {
- // not much we can do here
- Log.e(TAG, "Error in unlinkToRcClientDeath()", e);
- }
- }
- }
-
- // FIXME rename to "release"? (as in FocusRequester class)
- public void destroy() {
- unlinkToRcClientDeath();
- if (mToken != null) {
- mToken.unlinkToDeath(this, 0);
- mToken = null;
- }
- }
-
- @Override
- public void binderDied() {
- sController.unregisterMediaButtonIntentAsync(mMediaIntent);
- }
-
- @Override
- protected void finalize() throws Throwable {
- destroy(); // unlink exception handled inside method
- super.finalize();
- }
-}
diff --git a/media/java/android/media/RemoteControlClient.java b/media/java/android/media/RemoteControlClient.java
index 1b6536f..c9a86d8 100644
--- a/media/java/android/media/RemoteControlClient.java
+++ b/media/java/android/media/RemoteControlClient.java
@@ -27,7 +27,6 @@ import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
-import android.os.ServiceManager;
import android.os.SystemClock;
import android.util.Log;
diff --git a/media/java/android/media/RemoteDisplay.java b/media/java/android/media/RemoteDisplay.java
index 4e937a5..5add65a 100644
--- a/media/java/android/media/RemoteDisplay.java
+++ b/media/java/android/media/RemoteDisplay.java
@@ -37,17 +37,19 @@ public final class RemoteDisplay {
private final CloseGuard mGuard = CloseGuard.get();
private final Listener mListener;
private final Handler mHandler;
+ private final String mOpPackageName;
private long mPtr;
- private native long nativeListen(String iface);
+ private native long nativeListen(String iface, String opPackageName);
private native void nativeDispose(long ptr);
private native void nativePause(long ptr);
private native void nativeResume(long ptr);
- private RemoteDisplay(Listener listener, Handler handler) {
+ private RemoteDisplay(Listener listener, Handler handler, String opPackageName) {
mListener = listener;
mHandler = handler;
+ mOpPackageName = opPackageName;
}
@Override
@@ -66,7 +68,8 @@ public final class RemoteDisplay {
* @param listener The listener to invoke when displays are connected or disconnected.
* @param handler The handler on which to invoke the listener.
*/
- public static RemoteDisplay listen(String iface, Listener listener, Handler handler) {
+ public static RemoteDisplay listen(String iface, Listener listener, Handler handler,
+ String opPackageName) {
if (iface == null) {
throw new IllegalArgumentException("iface must not be null");
}
@@ -77,7 +80,7 @@ public final class RemoteDisplay {
throw new IllegalArgumentException("handler must not be null");
}
- RemoteDisplay display = new RemoteDisplay(listener, handler);
+ RemoteDisplay display = new RemoteDisplay(listener, handler, opPackageName);
display.startListening(iface);
return display;
}
@@ -113,7 +116,7 @@ public final class RemoteDisplay {
}
private void startListening(String iface) {
- mPtr = nativeListen(iface);
+ mPtr = nativeListen(iface, mOpPackageName);
if (mPtr == 0) {
throw new IllegalStateException("Could not start listening for "
+ "remote display connection on \"" + iface + "\"");
diff --git a/media/java/android/media/RingtoneManager.java b/media/java/android/media/RingtoneManager.java
index e211b99..a1b8a3b 100644
--- a/media/java/android/media/RingtoneManager.java
+++ b/media/java/android/media/RingtoneManager.java
@@ -158,7 +158,16 @@ public class RingtoneManager {
* in most cases.
*/
public static final String EXTRA_RINGTONE_TITLE = "android.intent.extra.ringtone.TITLE";
-
+
+ /**
+ * @hide
+ * Given to the ringtone picker as an int. Additional AudioAttributes flags to use
+ * when playing the ringtone in the picker.
+ * @see #ACTION_RINGTONE_PICKER
+ */
+ public static final String EXTRA_RINGTONE_AUDIO_ATTRIBUTES_FLAGS =
+ "android.intent.extra.ringtone.AUDIO_ATTRIBUTES_FLAGS";
+
/**
* Returned from the ringtone picker as a {@link Uri}.
* <p>
@@ -221,7 +230,7 @@ public class RingtoneManager {
private boolean mStopPreviousRingtone = true;
private Ringtone mPreviousRingtone;
-
+
/**
* Constructs a RingtoneManager. This constructor is recommended as its
* constructed instance manages cursor(s).
@@ -283,7 +292,7 @@ public class RingtoneManager {
return AudioManager.STREAM_RING;
}
}
-
+
/**
* Whether retrieving another {@link Ringtone} will stop playing the
* previously retrieved {@link Ringtone}.
diff --git a/media/java/android/media/SoundPool.java b/media/java/android/media/SoundPool.java
index 32d5b82..64863c2 100644
--- a/media/java/android/media/SoundPool.java
+++ b/media/java/android/media/SoundPool.java
@@ -32,7 +32,6 @@ import android.os.ParcelFileDescriptor;
import android.os.Process;
import android.os.RemoteException;
import android.os.ServiceManager;
-import android.os.SystemProperties;
import android.util.AndroidRuntimeException;
import android.util.Log;
@@ -112,7 +111,24 @@ import com.android.internal.app.IAppOpsService;
* resumes.</p>
*/
public class SoundPool {
- private final SoundPoolDelegate mImpl;
+ static { System.loadLibrary("soundpool"); }
+
+ // SoundPool messages
+ //
+ // must match SoundPool.h
+ private static final int SAMPLE_LOADED = 1;
+
+ private final static String TAG = "SoundPool";
+ private final static boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+
+ private long mNativeContext; // accessed by native methods
+
+ private EventHandler mEventHandler;
+ private SoundPool.OnLoadCompleteListener mOnLoadCompleteListener;
+
+ private final Object mLock;
+ private final AudioAttributes mAttributes;
+ private final IAppOpsService mAppOps;
/**
* Constructor. Constructs a SoundPool object with the following
@@ -135,68 +151,26 @@ public class SoundPool {
}
private SoundPool(int maxStreams, AudioAttributes attributes) {
- if (SystemProperties.getBoolean("config.disable_media", false)) {
- mImpl = new SoundPoolStub();
- } else {
- mImpl = new SoundPoolImpl(this, maxStreams, attributes);
+ // do native setup
+ if (native_setup(new WeakReference<SoundPool>(this), maxStreams, attributes) != 0) {
+ throw new RuntimeException("Native setup failed");
}
+ mLock = new Object();
+ mAttributes = attributes;
+ IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
+ mAppOps = IAppOpsService.Stub.asInterface(b);
}
/**
- * Builder class for {@link SoundPool} objects.
+ * Release the SoundPool resources.
+ *
+ * Release all memory and native resources used by the SoundPool
+ * object. The SoundPool can no longer be used and the reference
+ * should be set to null.
*/
- public static class Builder {
- private int mMaxStreams = 1;
- private AudioAttributes mAudioAttributes;
+ public native final void release();
- /**
- * Constructs a new Builder with the defaults format values.
- * If not provided, the maximum number of streams is 1 (see {@link #setMaxStreams(int)} to
- * change it), and the audio attributes have a usage value of
- * {@link AudioAttributes#USAGE_MEDIA} (see {@link #setAudioAttributes(AudioAttributes)} to
- * change them).
- */
- public Builder() {
- }
-
- /**
- * Sets the maximum of number of simultaneous streams that can be played simultaneously.
- * @param maxStreams a value equal to 1 or greater.
- * @return the same Builder instance
- * @throws IllegalArgumentException
- */
- public Builder setMaxStreams(int maxStreams) throws IllegalArgumentException {
- if (maxStreams <= 0) {
- throw new IllegalArgumentException(
- "Strictly positive value required for the maximum number of streams");
- }
- mMaxStreams = maxStreams;
- return this;
- }
-
- /**
- * Sets the {@link AudioAttributes}. For examples, game applications will use attributes
- * built with usage information set to {@link AudioAttributes#USAGE_GAME}.
- * @param attributes a non-null
- * @return
- */
- public Builder setAudioAttributes(AudioAttributes attributes)
- throws IllegalArgumentException {
- if (attributes == null) {
- throw new IllegalArgumentException("Invalid null AudioAttributes");
- }
- mAudioAttributes = attributes;
- return this;
- }
-
- public SoundPool build() {
- if (mAudioAttributes == null) {
- mAudioAttributes = new AudioAttributes.Builder()
- .setUsage(AudioAttributes.USAGE_MEDIA).build();
- }
- return new SoundPool(mMaxStreams, mAudioAttributes);
- }
- }
+ protected void finalize() { release(); }
/**
* Load the sound from the specified path.
@@ -207,7 +181,19 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(String path, int priority) {
- return mImpl.load(path, priority);
+ int id = 0;
+ try {
+ File f = new File(path);
+ ParcelFileDescriptor fd = ParcelFileDescriptor.open(f,
+ ParcelFileDescriptor.MODE_READ_ONLY);
+ if (fd != null) {
+ id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
+ fd.close();
+ }
+ } catch (java.io.IOException e) {
+ Log.e(TAG, "error loading " + path);
+ }
+ return id;
}
/**
@@ -226,7 +212,17 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(Context context, int resId, int priority) {
- return mImpl.load(context, resId, priority);
+ AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
+ int id = 0;
+ if (afd != null) {
+ id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
+ try {
+ afd.close();
+ } catch (java.io.IOException ex) {
+ //Log.d(TAG, "close failed:", ex);
+ }
+ }
+ return id;
}
/**
@@ -238,7 +234,15 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(AssetFileDescriptor afd, int priority) {
- return mImpl.load(afd, priority);
+ if (afd != null) {
+ long len = afd.getLength();
+ if (len < 0) {
+ throw new AndroidRuntimeException("no length for fd");
+ }
+ return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
+ } else {
+ return 0;
+ }
}
/**
@@ -256,7 +260,7 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(FileDescriptor fd, long offset, long length, int priority) {
- return mImpl.load(fd, offset, length, priority);
+ return _load(fd, offset, length, priority);
}
/**
@@ -269,9 +273,7 @@ public class SoundPool {
* @param soundID a soundID returned by the load() function
* @return true if just unloaded, false if previously unloaded
*/
- public final boolean unload(int soundID) {
- return mImpl.unload(soundID);
- }
+ public native final boolean unload(int soundID);
/**
* Play a sound from a sound ID.
@@ -299,8 +301,10 @@ public class SoundPool {
*/
public final int play(int soundID, float leftVolume, float rightVolume,
int priority, int loop, float rate) {
- return mImpl.play(
- soundID, leftVolume, rightVolume, priority, loop, rate);
+ if (isRestricted()) {
+ leftVolume = rightVolume = 0;
+ }
+ return _play(soundID, leftVolume, rightVolume, priority, loop, rate);
}
/**
@@ -314,9 +318,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void pause(int streamID) {
- mImpl.pause(streamID);
- }
+ public native final void pause(int streamID);
/**
* Resume a playback stream.
@@ -328,9 +330,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void resume(int streamID) {
- mImpl.resume(streamID);
- }
+ public native final void resume(int streamID);
/**
* Pause all active streams.
@@ -340,9 +340,7 @@ public class SoundPool {
* are playing. It also sets a flag so that any streams that
* are playing can be resumed by calling autoResume().
*/
- public final void autoPause() {
- mImpl.autoPause();
- }
+ public native final void autoPause();
/**
* Resume all previously active streams.
@@ -350,9 +348,7 @@ public class SoundPool {
* Automatically resumes all streams that were paused in previous
* calls to autoPause().
*/
- public final void autoResume() {
- mImpl.autoResume();
- }
+ public native final void autoResume();
/**
* Stop a playback stream.
@@ -365,9 +361,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void stop(int streamID) {
- mImpl.stop(streamID);
- }
+ public native final void stop(int streamID);
/**
* Set stream volume.
@@ -381,9 +375,11 @@ public class SoundPool {
* @param leftVolume left volume value (range = 0.0 to 1.0)
* @param rightVolume right volume value (range = 0.0 to 1.0)
*/
- public final void setVolume(int streamID,
- float leftVolume, float rightVolume) {
- mImpl.setVolume(streamID, leftVolume, rightVolume);
+ public final void setVolume(int streamID, float leftVolume, float rightVolume) {
+ if (isRestricted()) {
+ return;
+ }
+ _setVolume(streamID, leftVolume, rightVolume);
}
/**
@@ -404,9 +400,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void setPriority(int streamID, int priority) {
- mImpl.setPriority(streamID, priority);
- }
+ public native final void setPriority(int streamID, int priority);
/**
* Set loop mode.
@@ -419,9 +413,7 @@ public class SoundPool {
* @param streamID a streamID returned by the play() function
* @param loop loop mode (0 = no loop, -1 = loop forever)
*/
- public final void setLoop(int streamID, int loop) {
- mImpl.setLoop(streamID, loop);
- }
+ public native final void setLoop(int streamID, int loop);
/**
* Change playback rate.
@@ -435,9 +427,7 @@ public class SoundPool {
* @param streamID a streamID returned by the play() function
* @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
*/
- public final void setRate(int streamID, float rate) {
- mImpl.setRate(streamID, rate);
- }
+ public native final void setRate(int streamID, float rate);
public interface OnLoadCompleteListener {
/**
@@ -454,353 +444,137 @@ public class SoundPool {
* Sets the callback hook for the OnLoadCompleteListener.
*/
public void setOnLoadCompleteListener(OnLoadCompleteListener listener) {
- mImpl.setOnLoadCompleteListener(listener);
- }
-
- /**
- * Release the SoundPool resources.
- *
- * Release all memory and native resources used by the SoundPool
- * object. The SoundPool can no longer be used and the reference
- * should be set to null.
- */
- public final void release() {
- mImpl.release();
- }
-
- /**
- * Interface for SoundPool implementations.
- * SoundPool is statically referenced and unconditionally called from all
- * over the framework, so we can't simply omit the class or make it throw
- * runtime exceptions, as doing so would break the framework. Instead we
- * now select either a real or no-op impl object based on whether media is
- * enabled.
- *
- * @hide
- */
- /* package */ interface SoundPoolDelegate {
- public int load(String path, int priority);
- public int load(Context context, int resId, int priority);
- public int load(AssetFileDescriptor afd, int priority);
- public int load(
- FileDescriptor fd, long offset, long length, int priority);
- public boolean unload(int soundID);
- public int play(
- int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate);
- public void pause(int streamID);
- public void resume(int streamID);
- public void autoPause();
- public void autoResume();
- public void stop(int streamID);
- public void setVolume(int streamID, float leftVolume, float rightVolume);
- public void setVolume(int streamID, float volume);
- public void setPriority(int streamID, int priority);
- public void setLoop(int streamID, int loop);
- public void setRate(int streamID, float rate);
- public void setOnLoadCompleteListener(OnLoadCompleteListener listener);
- public void release();
- }
-
-
- /**
- * Real implementation of the delegate interface. This was formerly the
- * body of SoundPool itself.
- */
- /* package */ static class SoundPoolImpl implements SoundPoolDelegate {
- static { System.loadLibrary("soundpool"); }
-
- private final static String TAG = "SoundPool";
- private final static boolean DEBUG = false;
-
- private long mNativeContext; // accessed by native methods
-
- private EventHandler mEventHandler;
- private SoundPool.OnLoadCompleteListener mOnLoadCompleteListener;
- private SoundPool mProxy;
-
- private final Object mLock;
- private final AudioAttributes mAttributes;
- private final IAppOpsService mAppOps;
-
- // SoundPool messages
- //
- // must match SoundPool.h
- private static final int SAMPLE_LOADED = 1;
-
- public SoundPoolImpl(SoundPool proxy, int maxStreams, AudioAttributes attr) {
-
- // do native setup
- if (native_setup(new WeakReference(this), maxStreams, attr) != 0) {
- throw new RuntimeException("Native setup failed");
- }
- mLock = new Object();
- mProxy = proxy;
- mAttributes = attr;
- IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
- mAppOps = IAppOpsService.Stub.asInterface(b);
- }
-
- public int load(String path, int priority)
- {
- int id = 0;
- try {
- File f = new File(path);
- ParcelFileDescriptor fd = ParcelFileDescriptor.open(f, ParcelFileDescriptor.MODE_READ_ONLY);
- if (fd != null) {
- id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
- fd.close();
- }
- } catch (java.io.IOException e) {
- Log.e(TAG, "error loading " + path);
- }
- return id;
- }
-
- @Override
- public int load(Context context, int resId, int priority) {
- AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
- int id = 0;
- if (afd != null) {
- id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
- try {
- afd.close();
- } catch (java.io.IOException ex) {
- //Log.d(TAG, "close failed:", ex);
- }
- }
- return id;
- }
-
- @Override
- public int load(AssetFileDescriptor afd, int priority) {
- if (afd != null) {
- long len = afd.getLength();
- if (len < 0) {
- throw new AndroidRuntimeException("no length for fd");
+ synchronized(mLock) {
+ if (listener != null) {
+ // setup message handler
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mEventHandler = new EventHandler(looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mEventHandler = new EventHandler(looper);
+ } else {
+ mEventHandler = null;
}
- return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
} else {
- return 0;
+ mEventHandler = null;
}
+ mOnLoadCompleteListener = listener;
}
+ }
- @Override
- public int load(FileDescriptor fd, long offset, long length, int priority) {
- return _load(fd, offset, length, priority);
- }
-
- private native final int _load(FileDescriptor fd, long offset, long length, int priority);
-
- @Override
- public native final boolean unload(int soundID);
-
- @Override
- public final int play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate) {
- if (isRestricted()) {
- leftVolume = rightVolume = 0;
- }
- return _play(soundID, leftVolume, rightVolume, priority, loop, rate);
+ private boolean isRestricted() {
+ if ((mAttributes.getAllFlags() & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0) {
+ return false;
}
-
- public native final int _play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate);
-
- private boolean isRestricted() {
- try {
- final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO,
- mAttributes.getUsage(),
- Process.myUid(), ActivityThread.currentPackageName());
- return mode != AppOpsManager.MODE_ALLOWED;
- } catch (RemoteException e) {
- return false;
- }
+ try {
+ final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO,
+ mAttributes.getUsage(),
+ Process.myUid(), ActivityThread.currentPackageName());
+ return mode != AppOpsManager.MODE_ALLOWED;
+ } catch (RemoteException e) {
+ return false;
}
+ }
- @Override
- public native final void pause(int streamID);
+ private native final int _load(FileDescriptor fd, long offset, long length, int priority);
- @Override
- public native final void resume(int streamID);
+ private native final int native_setup(Object weakRef, int maxStreams,
+ Object/*AudioAttributes*/ attributes);
- @Override
- public native final void autoPause();
+ private native final int _play(int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate);
- @Override
- public native final void autoResume();
+ private native final void _setVolume(int streamID, float leftVolume, float rightVolume);
- @Override
- public native final void stop(int streamID);
+ // post event from native code to message handler
+ @SuppressWarnings("unchecked")
+ private static void postEventFromNative(Object ref, int msg, int arg1, int arg2, Object obj) {
+ SoundPool soundPool = ((WeakReference<SoundPool>) ref).get();
+ if (soundPool == null)
+ return;
- @Override
- public final void setVolume(int streamID, float leftVolume, float rightVolume) {
- if (isRestricted()) {
- return;
- }
- _setVolume(streamID, leftVolume, rightVolume);
+ if (soundPool.mEventHandler != null) {
+ Message m = soundPool.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
+ soundPool.mEventHandler.sendMessage(m);
}
+ }
- private native final void _setVolume(int streamID, float leftVolume, float rightVolume);
-
- @Override
- public void setVolume(int streamID, float volume) {
- setVolume(streamID, volume, volume);
+ private final class EventHandler extends Handler {
+ public EventHandler(Looper looper) {
+ super(looper);
}
@Override
- public native final void setPriority(int streamID, int priority);
-
- @Override
- public native final void setLoop(int streamID, int loop);
-
- @Override
- public native final void setRate(int streamID, float rate);
-
- @Override
- public void setOnLoadCompleteListener(SoundPool.OnLoadCompleteListener listener)
- {
- synchronized(mLock) {
- if (listener != null) {
- // setup message handler
- Looper looper;
- if ((looper = Looper.myLooper()) != null) {
- mEventHandler = new EventHandler(mProxy, looper);
- } else if ((looper = Looper.getMainLooper()) != null) {
- mEventHandler = new EventHandler(mProxy, looper);
- } else {
- mEventHandler = null;
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case SAMPLE_LOADED:
+ if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
+ synchronized(mLock) {
+ if (mOnLoadCompleteListener != null) {
+ mOnLoadCompleteListener.onLoadComplete(SoundPool.this, msg.arg1, msg.arg2);
}
- } else {
- mEventHandler = null;
}
- mOnLoadCompleteListener = listener;
- }
- }
-
- private class EventHandler extends Handler
- {
- private SoundPool mSoundPool;
-
- public EventHandler(SoundPool soundPool, Looper looper) {
- super(looper);
- mSoundPool = soundPool;
- }
-
- @Override
- public void handleMessage(Message msg) {
- switch(msg.what) {
- case SAMPLE_LOADED:
- if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
- synchronized(mLock) {
- if (mOnLoadCompleteListener != null) {
- mOnLoadCompleteListener.onLoadComplete(mSoundPool, msg.arg1, msg.arg2);
- }
- }
- break;
- default:
- Log.e(TAG, "Unknown message type " + msg.what);
- return;
- }
- }
- }
-
- // post event from native code to message handler
- private static void postEventFromNative(Object weakRef, int msg, int arg1, int arg2, Object obj)
- {
- SoundPoolImpl soundPoolImpl = (SoundPoolImpl)((WeakReference)weakRef).get();
- if (soundPoolImpl == null)
+ break;
+ default:
+ Log.e(TAG, "Unknown message type " + msg.what);
return;
-
- if (soundPoolImpl.mEventHandler != null) {
- Message m = soundPoolImpl.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
- soundPoolImpl.mEventHandler.sendMessage(m);
}
}
-
- public native final void release();
-
- private native final int native_setup(Object weakRef, int maxStreams,
- Object/*AudioAttributes*/ attributes);
-
- protected void finalize() { release(); }
}
/**
- * No-op implementation of SoundPool.
- * Used when media is disabled by the system.
- * @hide
+ * Builder class for {@link SoundPool} objects.
*/
- /* package */ static class SoundPoolStub implements SoundPoolDelegate {
- public SoundPoolStub() { }
-
- public int load(String path, int priority) {
- return 0;
- }
-
- @Override
- public int load(Context context, int resId, int priority) {
- return 0;
- }
-
- @Override
- public int load(AssetFileDescriptor afd, int priority) {
- return 0;
- }
-
- @Override
- public int load(FileDescriptor fd, long offset, long length, int priority) {
- return 0;
- }
+ public static class Builder {
+ private int mMaxStreams = 1;
+ private AudioAttributes mAudioAttributes;
- @Override
- public final boolean unload(int soundID) {
- return true;
+ /**
+ * Constructs a new Builder with the default format values.
+ * If not provided, the maximum number of streams is 1 (see {@link #setMaxStreams(int)} to
+ * change it), and the audio attributes have a usage value of
+ * {@link AudioAttributes#USAGE_MEDIA} (see {@link #setAudioAttributes(AudioAttributes)} to
+ * change them).
+ */
+ public Builder() {
}
- @Override
- public final int play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate) {
- return 0;
+ /**
+ * Sets the maximum number of streams that can be played simultaneously.
+ * @param maxStreams a value equal to 1 or greater.
+ * @return the same Builder instance
+ * @throws IllegalArgumentException
+ */
+ public Builder setMaxStreams(int maxStreams) throws IllegalArgumentException {
+ if (maxStreams <= 0) {
+ throw new IllegalArgumentException(
+ "Strictly positive value required for the maximum number of streams");
+ }
+ mMaxStreams = maxStreams;
+ return this;
}
- @Override
- public final void pause(int streamID) { }
-
- @Override
- public final void resume(int streamID) { }
-
- @Override
- public final void autoPause() { }
-
- @Override
- public final void autoResume() { }
-
- @Override
- public final void stop(int streamID) { }
-
- @Override
- public final void setVolume(int streamID,
- float leftVolume, float rightVolume) { }
-
- @Override
- public void setVolume(int streamID, float volume) {
+ /**
+ * Sets the {@link AudioAttributes}. For examples, game applications will use attributes
+ * built with usage information set to {@link AudioAttributes#USAGE_GAME}.
+ * @param attributes a non-null {@link AudioAttributes} instance
+ * @return the same Builder instance
+ */
+ public Builder setAudioAttributes(AudioAttributes attributes)
+ throws IllegalArgumentException {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Invalid null AudioAttributes");
+ }
+ mAudioAttributes = attributes;
+ return this;
}
- @Override
- public final void setPriority(int streamID, int priority) { }
-
- @Override
- public final void setLoop(int streamID, int loop) { }
-
- @Override
- public final void setRate(int streamID, float rate) { }
-
- @Override
- public void setOnLoadCompleteListener(SoundPool.OnLoadCompleteListener listener) {
+ public SoundPool build() {
+ if (mAudioAttributes == null) {
+ mAudioAttributes = new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_MEDIA).build();
+ }
+ return new SoundPool(mMaxStreams, mAudioAttributes);
}
-
- @Override
- public final void release() { }
}
}
diff --git a/media/java/android/media/SubtitleController.java b/media/java/android/media/SubtitleController.java
index f82dbe0..fd72b39 100644
--- a/media/java/android/media/SubtitleController.java
+++ b/media/java/android/media/SubtitleController.java
@@ -20,6 +20,7 @@ import java.util.Locale;
import java.util.Vector;
import android.content.Context;
+import android.media.MediaPlayer.TrackInfo;
import android.media.SubtitleTrack.RenderingWidget;
import android.os.Handler;
import android.os.Looper;
@@ -275,7 +276,8 @@ public class SubtitleController {
mSelectedTrack.getFormat().getInteger(
MediaFormat.KEY_IS_FORCED_SUBTITLE, 0) != 0)) {
show();
- } else if (mSelectedTrack != null && !mSelectedTrack.isTimedText()) {
+ } else if (mSelectedTrack != null
+ && mSelectedTrack.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
hide();
}
mVisibilityIsExplicit = false;
diff --git a/media/java/android/media/SubtitleTrack.java b/media/java/android/media/SubtitleTrack.java
index c760810..6c8e323 100644
--- a/media/java/android/media/SubtitleTrack.java
+++ b/media/java/android/media/SubtitleTrack.java
@@ -17,6 +17,7 @@
package android.media;
import android.graphics.Canvas;
+import android.media.MediaPlayer.TrackInfo;
import android.os.Handler;
import android.util.Log;
import android.util.LongSparseArray;
@@ -609,8 +610,10 @@ public abstract class SubtitleTrack implements MediaTimeProvider.OnMediaTimeList
}
/** @hide whether this is a text track who fires events instead getting rendered */
- public boolean isTimedText() {
- return getRenderingWidget() == null;
+ public int getTrackType() {
+ return getRenderingWidget() == null
+ ? TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT
+ : TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE;
}
diff --git a/media/java/android/media/SyncParams.java b/media/java/android/media/SyncParams.java
new file mode 100644
index 0000000..319eacb
--- /dev/null
+++ b/media/java/android/media/SyncParams.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import android.annotation.IntDef;
+
+/**
+ * Structure for common A/V sync params.
+ *
+ * Used by {@link MediaSync} {@link MediaSync#getSyncParams()} and
+ * {@link MediaSync#setSyncParams(SyncParams)}
+ * to control A/V sync behavior.
+ * <p> <strong>audio adjust mode:</strong>
+ * select handling of audio track when changing playback speed due to sync.
+ * <ul>
+ * <li> {@link SyncParams#AUDIO_ADJUST_MODE_DEFAULT}:
+ * System will determine best handling. </li>
+ * <li> {@link SyncParams#AUDIO_ADJUST_MODE_STRETCH}:
+ * Change the speed of audio playback without altering its pitch.</li>
+ * <li> {@link SyncParams#AUDIO_ADJUST_MODE_RESAMPLE}:
+ * Change the speed of audio playback by resampling the audio.</li>
+ * </ul>
+ * <p> <strong>sync source:</strong> select
+ * clock source for sync.
+ * <ul>
+ * <li> {@link SyncParams#SYNC_SOURCE_DEFAULT}:
+ * System will determine best selection.</li>
+ * <li> {@link SyncParams#SYNC_SOURCE_SYSTEM_CLOCK}:
+ * Use system clock for sync source.</li>
+ * <li> {@link SyncParams#SYNC_SOURCE_AUDIO}:
+ * Use audio track for sync source.</li>
+ * <li> {@link SyncParams#SYNC_SOURCE_VSYNC}:
+ * Synchronize media to vsync.</li>
+ * </ul>
+ * <p> <strong>tolerance:</strong> specifies the amount of allowed playback rate
+ * change to keep media in sync with the sync source. The handling of this depends
+ * on the sync source, but must not be negative, and must be less than one.
+ * <p> <strong>frameRate:</strong> initial hint for video frame rate. Used when
+ * sync source is vsync. Negative values can be used to clear a previous hint.
+ */
+public final class SyncParams {
+ /** @hide */
+ @IntDef(
+ value = {
+ SYNC_SOURCE_DEFAULT,
+ SYNC_SOURCE_SYSTEM_CLOCK,
+ SYNC_SOURCE_AUDIO,
+ SYNC_SOURCE_VSYNC,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SyncSource {}
+
+ /**
+ * Use the default sync source (default). If media has video, the sync renders to a
+ * surface that directly renders to a display, and tolerance is non zero (e.g. not
+ * less than 0.001) vsync source is used for clock source. Otherwise, if media has
+ * audio, audio track is used. Finally, if media has no audio, system clock is used.
+ */
+ public static final int SYNC_SOURCE_DEFAULT = 0;
+
+ /**
+ * Use system monotonic clock for sync source.
+ *
+ * @see System#nanoTime
+ */
+ public static final int SYNC_SOURCE_SYSTEM_CLOCK = 1;
+
+ /**
+ * Use audio track for sync source. This requires audio data and an audio track.
+ *
+ * @see AudioTrack#getTimestamp
+ */
+ public static final int SYNC_SOURCE_AUDIO = 2;
+
+ /**
+ * Use vsync as the sync source. This requires video data and an output surface that
+ * directly renders to the display, e.g. {@link android.view.SurfaceView}
+ * <p>
+ * This mode allows smoother playback experience by adjusting the playback speed
+ * to match the vsync rate, e.g. playing 30fps content on a 59.94Hz display.
+ * When using this mode, the tolerance should be set to greater than 0 (e.g. at least
+ * 1/1000), so that the playback speed can actually be adjusted.
+ * <p>
+ * This mode can also be used to play 25fps content on a 60Hz display using
+ * a 2:3 pulldown (basically playing the content at 24fps), which results in
+ * better playback experience on most devices. In this case the tolerance should be
+ * at least (1/24).
+ *
+ * @see android.view.Choreographer.FrameCallback#doFrame
+ * @see android.view.Display#getAppVsyncOffsetNanos
+ */
+ public static final int SYNC_SOURCE_VSYNC = 3;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_ADJUST_MODE_DEFAULT,
+ AUDIO_ADJUST_MODE_STRETCH,
+ AUDIO_ADJUST_MODE_RESAMPLE,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioAdjustMode {}
+
+ /**
+ * System will determine best handling of audio for playback rate
+ * adjustments.
+ * <p>
+ * Used by default. This will make audio play faster or slower as required
+ * by the sync source without changing its pitch; however, system may fall
+ * back to some other method (e.g. change the pitch, or mute the audio) if
+ * time stretching is no longer supported for the playback rate.
+ */
+ public static final int AUDIO_ADJUST_MODE_DEFAULT = 0;
+
+ /**
+ * Time stretch audio when playback rate must be adjusted.
+ * <p>
+ * This will make audio play faster or slower as required by the sync source
+ * without changing its pitch, as long as it is supported for the playback
+ * rate.
+ *
+ * @see MediaSync#PLAYBACK_RATE_AUDIO_MODE_STRETCH
+ * @see MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_STRETCH
+ */
+ public static final int AUDIO_ADJUST_MODE_STRETCH = 1;
+
+ /**
+ * Resample audio when playback rate must be adjusted.
+ * <p>
+ * This will make audio play faster or slower as required by the sync source
+ * by changing its pitch (making it lower to play slower, and higher to play
+ * faster.)
+ *
+ * @see MediaSync#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE
+ * @see MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE
+ */
+ public static final int AUDIO_ADJUST_MODE_RESAMPLE = 2;
+
+ // flags to indicate which params are actually set
+ private static final int SET_SYNC_SOURCE = 1 << 0;
+ private static final int SET_AUDIO_ADJUST_MODE = 1 << 1;
+ private static final int SET_TOLERANCE = 1 << 2;
+ private static final int SET_FRAME_RATE = 1 << 3;
+ private int mSet = 0;
+
+ // params
+ private int mAudioAdjustMode = AUDIO_ADJUST_MODE_DEFAULT;
+ private int mSyncSource = SYNC_SOURCE_DEFAULT;
+ private float mTolerance = 0.f;
+ private float mFrameRate = 0.f;
+
+ /**
+ * Allows defaults to be returned for properties not set.
+ * Otherwise a {@link java.lang.IllegalArgumentException} exception
+ * is raised when getting those properties
+ * which have defaults but have never been set.
+ * @return this <code>SyncParams</code> instance.
+ */
+ public SyncParams allowDefaults() {
+ mSet |= SET_SYNC_SOURCE | SET_AUDIO_ADJUST_MODE | SET_TOLERANCE;
+ return this;
+ }
+
+ /**
+ * Sets the audio adjust mode.
+ * @param audioAdjustMode
+ * @return this <code>SyncParams</code> instance.
+ */
+ public SyncParams setAudioAdjustMode(@AudioAdjustMode int audioAdjustMode) {
+ mAudioAdjustMode = audioAdjustMode;
+ mSet |= SET_AUDIO_ADJUST_MODE;
+ return this;
+ }
+
+ /**
+ * Retrieves the audio adjust mode.
+ * @return audio adjust mode
+ * @throws IllegalStateException if the audio adjust mode is not set.
+ */
+ public @AudioAdjustMode int getAudioAdjustMode() {
+ if ((mSet & SET_AUDIO_ADJUST_MODE) == 0) {
+ throw new IllegalStateException("audio adjust mode not set");
+ }
+ return mAudioAdjustMode;
+ }
+
+ /**
+ * Sets the sync source.
+ * @param syncSource
+ * @return this <code>SyncParams</code> instance.
+ */
+ public SyncParams setSyncSource(@SyncSource int syncSource) {
+ mSyncSource = syncSource;
+ mSet |= SET_SYNC_SOURCE;
+ return this;
+ }
+
+ /**
+ * Retrieves the sync source.
+ * @return sync source
+ * @throws IllegalStateException if the sync source is not set.
+ */
+ public @SyncSource int getSyncSource() {
+ if ((mSet & SET_SYNC_SOURCE) == 0) {
+ throw new IllegalStateException("sync source not set");
+ }
+ return mSyncSource;
+ }
+
+ /**
+ * Sets the tolerance. The default tolerance is platform specific, but is never more than 1/24.
+ * @param tolerance A non-negative number representing
+ * the maximum deviation of the playback rate from the playback rate
+ * set. ({@code abs(actual_rate - set_rate) / set_rate})
+ * @return this <code>SyncParams</code> instance.
+ * @throws IllegalArgumentException if the tolerance is negative, or not less than one
+ */
+ public SyncParams setTolerance(float tolerance) {
+ if (tolerance < 0.f || tolerance >= 1.f) {
+ throw new IllegalArgumentException("tolerance must be less than one and non-negative");
+ }
+ mTolerance = tolerance;
+ mSet |= SET_TOLERANCE;
+ return this;
+ }
+
+ /**
+ * Retrieves the tolerance factor.
+ * @return tolerance factor. A non-negative number representing
+ * the maximum deviation of the playback rate from the playback rate
+ * set. ({@code abs(actual_rate - set_rate) / set_rate})
+ * @throws IllegalStateException if tolerance is not set.
+ */
+ public float getTolerance() {
+ if ((mSet & SET_TOLERANCE) == 0) {
+ throw new IllegalStateException("tolerance not set");
+ }
+ return mTolerance;
+ }
+
+ /**
+ * Sets the video frame rate hint to be used. By default the frame rate is unspecified.
+ * @param frameRate A non-negative number used as an initial hint on
+ * the video frame rate to be used when using vsync as the sync source. A negative
+ * number is used to clear a previous hint.
+ * @return this <code>SyncParams</code> instance.
+ */
+ public SyncParams setFrameRate(float frameRate) {
+ mFrameRate = frameRate;
+ mSet |= SET_FRAME_RATE;
+ return this;
+ }
+
+ /**
+ * Retrieves the video frame rate hint.
+ * @return the video frame rate hint; a non-negative number used as an
+ * initial hint on the video frame rate when using vsync as the
+ * sync source, or a negative
+ * number representing the desire to clear a previous hint using these params.
+ * @throws IllegalStateException if frame rate is not set.
+ */
+ public float getFrameRate() {
+ if ((mSet & SET_FRAME_RATE) == 0) {
+ throw new IllegalStateException("frame rate not set");
+ }
+ return mFrameRate;
+ }
+
+}
diff --git a/media/java/android/media/TimedMetaData.java b/media/java/android/media/TimedMetaData.java
new file mode 100644
index 0000000..0ab52d7
--- /dev/null
+++ b/media/java/android/media/TimedMetaData.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+
+/**
+ * Class that embodies one timed metadata access unit, including
+ *
+ * <ul>
+ * <li> a time stamp, and </li>
+ * <li> raw uninterpreted byte-array extracted directly from the container. </li>
+ * </ul>
+ *
+ * @see MediaPlayer#setOnTimedMetaDataAvailableListener(android.media.MediaPlayer.OnTimedMetaDataListener)
+ */
+public final class TimedMetaData {
+ private static final String TAG = "TimedMetaData";
+
+ private long mTimestampUs;
+ private byte[] mMetaData;
+
+ /**
+ * @hide
+ */
+ static TimedMetaData createTimedMetaDataFromParcel(Parcel parcel) {
+ return new TimedMetaData(parcel);
+ }
+
+ private TimedMetaData(Parcel parcel) {
+ if (!parseParcel(parcel)) {
+ throw new IllegalArgumentException("parseParcel() fails");
+ }
+ }
+
+ /**
+ * @return the timestamp associated with this metadata access unit in microseconds;
+ * 0 denotes playback start.
+ */
+ public long getTimestamp() {
+ return mTimestampUs;
+ }
+
+ /**
+ * @return raw, uninterpreted content of this metadata access unit; for ID3 tags this includes
+ * everything starting from the 3 byte signature "ID3".
+ */
+ public byte[] getMetaData() {
+ return mMetaData;
+ }
+
+ private boolean parseParcel(Parcel parcel) {
+ parcel.setDataPosition(0);
+ if (parcel.dataAvail() == 0) {
+ return false;
+ }
+
+ mTimestampUs = parcel.readLong();
+ mMetaData = new byte[parcel.readInt()];
+ parcel.readByteArray(mMetaData);
+
+ return true;
+ }
+}
diff --git a/media/java/android/media/TtmlRenderer.java b/media/java/android/media/TtmlRenderer.java
index 75133c9..9d587b9 100644
--- a/media/java/android/media/TtmlRenderer.java
+++ b/media/java/android/media/TtmlRenderer.java
@@ -17,27 +17,15 @@
package android.media;
import android.content.Context;
-import android.graphics.Color;
-import android.media.SubtitleTrack.RenderingWidget.OnChangedListener;
-import android.text.Layout.Alignment;
-import android.text.SpannableStringBuilder;
import android.text.TextUtils;
-import android.util.ArrayMap;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
-import android.view.ViewGroup;
-import android.view.View.MeasureSpec;
-import android.view.ViewGroup.LayoutParams;
import android.view.accessibility.CaptioningManager;
-import android.view.accessibility.CaptioningManager.CaptionStyle;
-import android.view.accessibility.CaptioningManager.CaptioningChangeListener;
import android.widget.LinearLayout;
import android.widget.TextView;
-import com.android.internal.widget.SubtitleView;
-
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
diff --git a/media/java/android/media/Utils.java b/media/java/android/media/Utils.java
index df0daaf..9e01e65 100644
--- a/media/java/android/media/Utils.java
+++ b/media/java/android/media/Utils.java
@@ -26,8 +26,6 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.Vector;
-import static com.android.internal.util.Preconditions.checkNotNull;
-
// package private
class Utils {
private static final String TAG = "Utils";
diff --git a/media/java/android/media/VolumePolicy.aidl b/media/java/android/media/VolumePolicy.aidl
new file mode 100644
index 0000000..371f798
--- /dev/null
+++ b/media/java/android/media/VolumePolicy.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+parcelable VolumePolicy;
diff --git a/media/java/android/media/VolumePolicy.java b/media/java/android/media/VolumePolicy.java
new file mode 100644
index 0000000..1d33128
--- /dev/null
+++ b/media/java/android/media/VolumePolicy.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.Objects;
+
+/** @hide */
+public final class VolumePolicy implements Parcelable {
+ public static final VolumePolicy DEFAULT = new VolumePolicy(false, false, true, 400);
+
+ /** Allow downward volume adjustments to change the ringer mode from vibrate to silent */
+ public final boolean volumeDownToEnterSilent;
+
+ /** Allow upward volume adjustments to exit ringer mode = silent */
+ public final boolean volumeUpToExitSilent;
+
+ /** Automatically enter do not disturb when ringer mode = silent */
+ public final boolean doNotDisturbWhenSilent;
+
+ /** Only allow volume adjustment from vibrate to silent after this
+ number of milliseconds since an adjustment from normal to vibrate. */
+ public final int vibrateToSilentDebounce;
+
+ public VolumePolicy(boolean volumeDownToEnterSilent, boolean volumeUpToExitSilent,
+ boolean doNotDisturbWhenSilent, int vibrateToSilentDebounce) {
+ this.volumeDownToEnterSilent = volumeDownToEnterSilent;
+ this.volumeUpToExitSilent = volumeUpToExitSilent;
+ this.doNotDisturbWhenSilent = doNotDisturbWhenSilent;
+ this.vibrateToSilentDebounce = vibrateToSilentDebounce;
+ }
+
+ @Override
+ public String toString() {
+ return "VolumePolicy[volumeDownToEnterSilent=" + volumeDownToEnterSilent
+ + ",volumeUpToExitSilent=" + volumeUpToExitSilent
+ + ",doNotDisturbWhenSilent=" + doNotDisturbWhenSilent
+ + ",vibrateToSilentDebounce=" + vibrateToSilentDebounce + "]";
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(volumeDownToEnterSilent, volumeUpToExitSilent, doNotDisturbWhenSilent,
+ vibrateToSilentDebounce);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof VolumePolicy)) return false;
+ if (o == this) return true;
+ final VolumePolicy other = (VolumePolicy) o;
+ return other.volumeDownToEnterSilent == volumeDownToEnterSilent
+ && other.volumeUpToExitSilent == volumeUpToExitSilent
+ && other.doNotDisturbWhenSilent == doNotDisturbWhenSilent
+ && other.vibrateToSilentDebounce == vibrateToSilentDebounce;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(volumeDownToEnterSilent ? 1 : 0);
+ dest.writeInt(volumeUpToExitSilent ? 1 : 0);
+ dest.writeInt(doNotDisturbWhenSilent ? 1 : 0);
+ dest.writeInt(vibrateToSilentDebounce);
+ }
+
+ public static final Parcelable.Creator<VolumePolicy> CREATOR
+ = new Parcelable.Creator<VolumePolicy>() {
+ @Override
+ public VolumePolicy createFromParcel(Parcel p) {
+ return new VolumePolicy(p.readInt() != 0,
+ p.readInt() != 0,
+ p.readInt() != 0,
+ p.readInt());
+ }
+
+ @Override
+ public VolumePolicy[] newArray(int size) {
+ return new VolumePolicy[size];
+ }
+ };
+} \ No newline at end of file
diff --git a/media/java/android/media/audiofx/AcousticEchoCanceler.java b/media/java/android/media/audiofx/AcousticEchoCanceler.java
index 4b59c88..f5f98ef 100644
--- a/media/java/android/media/audiofx/AcousticEchoCanceler.java
+++ b/media/java/android/media/audiofx/AcousticEchoCanceler.java
@@ -68,9 +68,8 @@ public class AcousticEchoCanceler extends AudioEffect {
Log.w(TAG, "not enough resources");
} catch (RuntimeException e) {
Log.w(TAG, "not enough memory");
- } finally {
- return aec;
}
+ return aec;
}
/**
diff --git a/media/java/android/media/audiofx/AudioEffect.java b/media/java/android/media/audiofx/AudioEffect.java
index a8b9686..b94a7e6 100644
--- a/media/java/android/media/audiofx/AudioEffect.java
+++ b/media/java/android/media/audiofx/AudioEffect.java
@@ -18,6 +18,7 @@ package android.media.audiofx;
import android.annotation.SdkConstant;
import android.annotation.SdkConstant.SdkConstantType;
+import android.app.ActivityThread;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
@@ -395,7 +396,7 @@ public class AudioEffect {
// native initialization
int initResult = native_setup(new WeakReference<AudioEffect>(this),
type.toString(), uuid.toString(), priority, audioSession, id,
- desc);
+ desc, ActivityThread.currentOpPackageName());
if (initResult != SUCCESS && initResult != ALREADY_EXISTS) {
Log.e(TAG, "Error code " + initResult
+ " when initializing AudioEffect.");
@@ -1217,7 +1218,8 @@ public class AudioEffect {
private static native final void native_init();
private native final int native_setup(Object audioeffect_this, String type,
- String uuid, int priority, int audioSession, int[] id, Object[] desc);
+ String uuid, int priority, int audioSession, int[] id, Object[] desc,
+ String opPackageName);
private native final void native_finalize();
diff --git a/media/java/android/media/audiofx/AutomaticGainControl.java b/media/java/android/media/audiofx/AutomaticGainControl.java
index 83eb4e9..4a6b1f3 100644
--- a/media/java/android/media/audiofx/AutomaticGainControl.java
+++ b/media/java/android/media/audiofx/AutomaticGainControl.java
@@ -68,9 +68,8 @@ public class AutomaticGainControl extends AudioEffect {
Log.w(TAG, "not enough resources");
} catch (RuntimeException e) {
Log.w(TAG, "not enough memory");
- } finally {
- return agc;
}
+ return agc;
}
/**
diff --git a/media/java/android/media/audiofx/NoiseSuppressor.java b/media/java/android/media/audiofx/NoiseSuppressor.java
index 0ea42ab..bca990f 100644
--- a/media/java/android/media/audiofx/NoiseSuppressor.java
+++ b/media/java/android/media/audiofx/NoiseSuppressor.java
@@ -70,9 +70,8 @@ public class NoiseSuppressor extends AudioEffect {
Log.w(TAG, "not enough resources");
} catch (RuntimeException e) {
Log.w(TAG, "not enough memory");
- } finally {
- return ns;
}
+ return ns;
}
/**
diff --git a/media/java/android/media/audiofx/Virtualizer.java b/media/java/android/media/audiofx/Virtualizer.java
index be5adc8..49e56bc 100644
--- a/media/java/android/media/audiofx/Virtualizer.java
+++ b/media/java/android/media/audiofx/Virtualizer.java
@@ -17,7 +17,7 @@
package android.media.audiofx;
import android.annotation.IntDef;
-import android.media.AudioDevice;
+import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.audiofx.AudioEffect;
import android.util.Log;
@@ -204,7 +204,7 @@ public class Virtualizer extends AudioEffect {
// convert channel mask to internal native representation
paramsConverter.putInt(AudioFormat.convertChannelOutMaskToNativeMask(channelMask));
// convert Java device type to internal representation
- paramsConverter.putInt(AudioDevice.convertDeviceTypeToInternalDevice(deviceType));
+ paramsConverter.putInt(AudioDeviceInfo.convertDeviceTypeToInternalDevice(deviceType));
// allocate an array to store the results
byte[] result = new byte[nbChannels * 4/*int to byte*/ * 3/*for mask, azimuth, elevation*/];
@@ -305,9 +305,9 @@ public class Virtualizer extends AudioEffect {
throws IllegalArgumentException {
switch (virtualizationMode) {
case VIRTUALIZATION_MODE_BINAURAL:
- return AudioDevice.TYPE_WIRED_HEADPHONES;
+ return AudioDeviceInfo.TYPE_WIRED_HEADPHONES;
case VIRTUALIZATION_MODE_TRANSAURAL:
- return AudioDevice.TYPE_BUILTIN_SPEAKER;
+ return AudioDeviceInfo.TYPE_BUILTIN_SPEAKER;
default:
throw (new IllegalArgumentException(
"Virtualizer: illegal virtualization mode " + virtualizationMode));
@@ -317,7 +317,7 @@ public class Virtualizer extends AudioEffect {
private static int getDeviceForModeForce(@ForceVirtualizationMode int virtualizationMode)
throws IllegalArgumentException {
if (virtualizationMode == VIRTUALIZATION_MODE_AUTO) {
- return AudioDevice.TYPE_UNKNOWN;
+ return AudioDeviceInfo.TYPE_UNKNOWN;
} else {
return getDeviceForModeQuery(virtualizationMode);
}
@@ -325,24 +325,24 @@ public class Virtualizer extends AudioEffect {
private static int deviceToMode(int deviceType) {
switch (deviceType) {
- case AudioDevice.TYPE_WIRED_HEADSET:
- case AudioDevice.TYPE_WIRED_HEADPHONES:
- case AudioDevice.TYPE_BLUETOOTH_SCO:
- case AudioDevice.TYPE_BUILTIN_EARPIECE:
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
return VIRTUALIZATION_MODE_BINAURAL;
- case AudioDevice.TYPE_BUILTIN_SPEAKER:
- case AudioDevice.TYPE_LINE_ANALOG:
- case AudioDevice.TYPE_LINE_DIGITAL:
- case AudioDevice.TYPE_BLUETOOTH_A2DP:
- case AudioDevice.TYPE_HDMI:
- case AudioDevice.TYPE_HDMI_ARC:
- case AudioDevice.TYPE_USB_DEVICE:
- case AudioDevice.TYPE_USB_ACCESSORY:
- case AudioDevice.TYPE_DOCK:
- case AudioDevice.TYPE_FM:
- case AudioDevice.TYPE_AUX_LINE:
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ case AudioDeviceInfo.TYPE_LINE_ANALOG:
+ case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_HDMI:
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ case AudioDeviceInfo.TYPE_USB_DEVICE:
+ case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+ case AudioDeviceInfo.TYPE_DOCK:
+ case AudioDeviceInfo.TYPE_FM:
+ case AudioDeviceInfo.TYPE_AUX_LINE:
return VIRTUALIZATION_MODE_TRANSAURAL;
- case AudioDevice.TYPE_UNKNOWN:
+ case AudioDeviceInfo.TYPE_UNKNOWN:
default:
return VIRTUALIZATION_MODE_OFF;
}
@@ -433,7 +433,7 @@ public class Virtualizer extends AudioEffect {
throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
// convert Java device type to internal representation
int deviceType = getDeviceForModeForce(virtualizationMode);
- int internalDevice = AudioDevice.convertDeviceTypeToInternalDevice(deviceType);
+ int internalDevice = AudioDeviceInfo.convertDeviceTypeToInternalDevice(deviceType);
int status = setParameter(PARAM_FORCE_VIRTUALIZATION_MODE, internalDevice);
@@ -470,7 +470,7 @@ public class Virtualizer extends AudioEffect {
int[] value = new int[1];
int status = getParameter(PARAM_VIRTUALIZATION_MODE, value);
if (status >= 0) {
- return deviceToMode(AudioDevice.convertInternalDeviceToDeviceType(value[0]));
+ return deviceToMode(AudioDeviceInfo.convertInternalDeviceToDeviceType(value[0]));
} else if (status == AudioEffect.ERROR_BAD_VALUE) {
return VIRTUALIZATION_MODE_OFF;
} else {
diff --git a/media/java/android/media/audiofx/Visualizer.java b/media/java/android/media/audiofx/Visualizer.java
index 24c74ac..0fe7246 100644
--- a/media/java/android/media/audiofx/Visualizer.java
+++ b/media/java/android/media/audiofx/Visualizer.java
@@ -16,6 +16,7 @@
package android.media.audiofx;
+import android.app.ActivityThread;
import android.util.Log;
import java.lang.ref.WeakReference;
import android.os.Handler;
@@ -206,7 +207,8 @@ public class Visualizer {
synchronized (mStateLock) {
mState = STATE_UNINITIALIZED;
// native initialization
- int result = native_setup(new WeakReference<Visualizer>(this), audioSession, id);
+ int result = native_setup(new WeakReference<Visualizer>(this), audioSession, id,
+ ActivityThread.currentOpPackageName());
if (result != SUCCESS && result != ALREADY_EXISTS) {
Log.e(TAG, "Error code "+result+" when initializing Visualizer.");
switch (result) {
@@ -716,7 +718,8 @@ public class Visualizer {
private native final int native_setup(Object audioeffect_this,
int audioSession,
- int[] id);
+ int[] id,
+ String opPackageName);
private native final void native_finalize();
@@ -765,6 +768,5 @@ public class Visualizer {
}
}
-
}
diff --git a/media/java/android/media/audiopolicy/AudioMix.java b/media/java/android/media/audiopolicy/AudioMix.java
index 1806662..4ffac6d 100644
--- a/media/java/android/media/audiopolicy/AudioMix.java
+++ b/media/java/android/media/audiopolicy/AudioMix.java
@@ -36,18 +36,30 @@ public class AudioMix {
private int mRouteFlags;
private String mRegistrationId;
private int mMixType = MIX_TYPE_INVALID;
+ int mMixState = MIX_STATE_DISABLED;
+ int mCallbackFlags;
/**
* All parameters are guaranteed valid through the Builder.
*/
- private AudioMix(AudioMixingRule rule, AudioFormat format, int routeFlags) {
+ private AudioMix(AudioMixingRule rule, AudioFormat format, int routeFlags, int callbackFlags) {
mRule = rule;
mFormat = format;
mRouteFlags = routeFlags;
mRegistrationId = null;
mMixType = rule.getTargetMixType();
+ mCallbackFlags = callbackFlags;
}
+ // CALLBACK_FLAG_* values: keep in sync with AudioMix::kCbFlag* values defined
+ // in frameworks/av/include/media/AudioPolicy.h
+ /** @hide */
+ public final static int CALLBACK_FLAG_NOTIFY_ACTIVITY = 0x1;
+ // when adding new MIX_FLAG_* flags, add them to this mask of authorized masks:
+ private final static int CALLBACK_FLAGS_ALL = CALLBACK_FLAG_NOTIFY_ACTIVITY;
+
+ // ROUTE_FLAG_* values: keep in sync with MIX_ROUTE_FLAG_* values defined
+ // in frameworks/av/include/media/AudioPolicy.h
/**
* An audio mix behavior where the output of the mix is sent to the original destination of
* the audio signal, i.e. an output device for an output mix, or a recording for an input mix.
@@ -62,6 +74,7 @@ public class AudioMix {
@SystemApi
public static final int ROUTE_FLAG_LOOP_BACK = 0x1 << 1;
+ // MIX_TYPE_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
/**
* @hide
* Invalid mix type, default value.
@@ -78,6 +91,39 @@ public class AudioMix {
*/
public static final int MIX_TYPE_RECORDERS = 1;
+
+ // MIX_STATE_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
+ /**
+ * @hide
+ * State of a mix before its policy is enabled.
+ */
+ @SystemApi
+ public static final int MIX_STATE_DISABLED = -1;
+ /**
+ * @hide
+ * State of a mix when there is no audio to mix.
+ */
+ @SystemApi
+ public static final int MIX_STATE_IDLE = 0;
+ /**
+ * @hide
+ * State of a mix that is actively mixing audio.
+ */
+ @SystemApi
+ public static final int MIX_STATE_MIXING = 1;
+
+ /**
+ * @hide
+ * The current mixing state.
+ * @return one of {@link #MIX_STATE_DISABLED}, {@link #MIX_STATE_IDLE},
+ * {@link #MIX_STATE_MIXING}.
+ */
+ @SystemApi
+ public int getMixState() {
+ return mMixState;
+ }
+
+
int getRouteFlags() {
return mRouteFlags;
}
@@ -125,6 +171,7 @@ public class AudioMix {
private AudioMixingRule mRule = null;
private AudioFormat mFormat = null;
private int mRouteFlags = 0;
+ private int mCallbackFlags = 0;
/**
* @hide
@@ -163,6 +210,22 @@ public class AudioMix {
}
/**
+ * @hide
+ * Only used by AudioPolicyConfig, not a public API.
+ * @param flags which callbacks are called from native
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setCallbackFlags(int flags) throws IllegalArgumentException {
+ if ((flags != 0) && ((flags & CALLBACK_FLAGS_ALL) == 0)) {
+ throw new IllegalArgumentException("Illegal callback flags 0x"
+ + Integer.toHexString(flags).toUpperCase());
+ }
+ mCallbackFlags = flags;
+ return this;
+ }
+
+ /**
* Sets the {@link AudioFormat} for the mix.
* @param format a non-null {@link AudioFormat} instance.
* @return the same Builder instance.
@@ -220,7 +283,7 @@ public class AudioMix {
}
mFormat = new AudioFormat.Builder().setSampleRate(rate).build();
}
- return new AudioMix(mRule, mFormat, mRouteFlags);
+ return new AudioMix(mRule, mFormat, mRouteFlags, mCallbackFlags);
}
}
}
diff --git a/media/java/android/media/audiopolicy/AudioPolicy.java b/media/java/android/media/audiopolicy/AudioPolicy.java
index f128044..423b467 100644
--- a/media/java/android/media/audiopolicy/AudioPolicy.java
+++ b/media/java/android/media/audiopolicy/AudioPolicy.java
@@ -189,6 +189,12 @@ public class AudioPolicy {
@SystemApi
public AudioPolicy build() {
+ if (mStatusListener != null) {
+ // the AudioPolicy status listener includes updates on each mix activity state
+ for (AudioMix mix : mMixes) {
+ mix.mCallbackFlags |= AudioMix.CALLBACK_FLAG_NOTIFY_ACTIVITY;
+ }
+ }
return new AudioPolicy(new AudioPolicyConfig(mMixes), mContext, mLooper,
mFocusListener, mStatusListener);
}
@@ -432,6 +438,18 @@ public class AudioPolicy {
+ afi.getClientId() + "wasNotified=" + wasNotified);
}
}
+
+ public void notifyMixStateUpdate(String regId, int state) {
+ for (AudioMix mix : mConfig.getMixes()) {
+ if (mix.getRegistration().equals(regId)) {
+ mix.mMixState = state;
+ sendMsg(MSG_MIX_STATE_UPDATE, mix, 0/*ignored*/);
+ if (DEBUG) {
+ Log.v(TAG, "notifyMixStateUpdate: regId=" + regId + " state=" + state);
+ }
+ }
+ }
+ }
};
//==================================================
@@ -440,6 +458,7 @@ public class AudioPolicy {
private final static int MSG_POLICY_STATUS_CHANGE = 0;
private final static int MSG_FOCUS_GRANT = 1;
private final static int MSG_FOCUS_LOSS = 2;
+ private final static int MSG_MIX_STATE_UPDATE = 3;
private class EventHandler extends Handler {
public EventHandler(AudioPolicy ap, Looper looper) {
@@ -464,6 +483,11 @@ public class AudioPolicy {
(AudioFocusInfo) msg.obj, msg.arg1 != 0);
}
break;
+ case MSG_MIX_STATE_UPDATE:
+ if (mStatusListener != null) {
+ mStatusListener.onMixStateUpdate((AudioMix) msg.obj);
+ }
+ break;
default:
Log.e(TAG, "Unknown event " + msg.what);
}
diff --git a/media/java/android/media/audiopolicy/AudioPolicyConfig.java b/media/java/android/media/audiopolicy/AudioPolicyConfig.java
index 019309d..252f5f4 100644
--- a/media/java/android/media/audiopolicy/AudioPolicyConfig.java
+++ b/media/java/android/media/audiopolicy/AudioPolicyConfig.java
@@ -16,12 +16,8 @@
package android.media.audiopolicy;
-import android.media.AudioAttributes;
import android.media.AudioFormat;
-import android.media.AudioManager;
import android.media.audiopolicy.AudioMixingRule.AttributeMatchCriterion;
-import android.os.Binder;
-import android.os.IBinder;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.Log;
@@ -63,6 +59,10 @@ public class AudioPolicyConfig implements Parcelable {
mMixes.add(mix);
}
+ public ArrayList<AudioMix> getMixes() {
+ return mMixes;
+ }
+
@Override
public int hashCode() {
return Objects.hash(mMixes);
@@ -79,6 +79,8 @@ public class AudioPolicyConfig implements Parcelable {
for (AudioMix mix : mMixes) {
// write mix route flags
dest.writeInt(mix.getRouteFlags());
+ // write callback flags
+ dest.writeInt(mix.mCallbackFlags);
// write mix format
dest.writeInt(mix.getFormat().getSampleRate());
dest.writeInt(mix.getFormat().getEncoding());
@@ -100,6 +102,8 @@ public class AudioPolicyConfig implements Parcelable {
// read mix route flags
int routeFlags = in.readInt();
mixBuilder.setRouteFlags(routeFlags);
+ // read callback flags
+ mixBuilder.setCallbackFlags(in.readInt());
// read mix format
int sampleRate = in.readInt();
int encoding = in.readInt();
diff --git a/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl b/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl
index c777c58..ad8af15 100644
--- a/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl
+++ b/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl
@@ -25,4 +25,7 @@ oneway interface IAudioPolicyCallback {
// callbacks for audio focus
void notifyAudioFocusGrant(in AudioFocusInfo afi, int requestResult);
void notifyAudioFocusLoss(in AudioFocusInfo afi, boolean wasNotified);
+
+ // callback for mix activity status update
+ void notifyMixStateUpdate(in String regId, int state);
}
diff --git a/media/java/android/media/browse/MediaBrowser.java b/media/java/android/media/browse/MediaBrowser.java
index d260b05..ef8d169 100644
--- a/media/java/android/media/browse/MediaBrowser.java
+++ b/media/java/android/media/browse/MediaBrowser.java
@@ -33,6 +33,7 @@ import android.os.IBinder;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.RemoteException;
+import android.os.ResultReceiver;
import android.service.media.MediaBrowserService;
import android.service.media.IMediaBrowserService;
import android.service.media.IMediaBrowserServiceCallbacks;
@@ -291,15 +292,17 @@ public final class MediaBrowser {
* the specified id and subscribes to receive updates when they change.
* <p>
* The list of subscriptions is maintained even when not connected and is
- * restored after reconnection. It is ok to subscribe while not connected
+ * restored after reconnection. It is ok to subscribe while not connected
* but the results will not be returned until the connection completes.
- * </p><p>
+ * </p>
+ * <p>
* If the id is already subscribed with a different callback then the new
- * callback will replace the previous one.
+ * callback will replace the previous one and the child data will be
+ * reloaded.
* </p>
*
* @param parentId The id of the parent media item whose list of children
- * will be subscribed.
+ * will be subscribed.
* @param callback The callback to receive the list of children.
*/
public void subscribe(@NonNull String parentId, @NonNull SubscriptionCallback callback) {
@@ -322,7 +325,7 @@ public final class MediaBrowser {
// If we are connected, tell the service that we are watching. If we aren't
// connected, the service will be told when we connect.
- if (mState == CONNECT_STATE_CONNECTED && newSubscription) {
+ if (mState == CONNECT_STATE_CONNECTED) {
try {
mServiceBinder.addSubscription(parentId, mServiceCallbacks);
} catch (RemoteException ex) {
@@ -345,8 +348,8 @@ public final class MediaBrowser {
*/
public void unsubscribe(@NonNull String parentId) {
// Check arguments.
- if (parentId == null) {
- throw new IllegalArgumentException("parentId is null");
+ if (TextUtils.isEmpty(parentId)) {
+ throw new IllegalArgumentException("parentId is empty.");
}
// Remove from our list.
@@ -365,6 +368,60 @@ public final class MediaBrowser {
}
/**
+ * Retrieves a specific {@link MediaItem} from the connected service. Not
+ * all services may support this, so falling back to subscribing to the
+ * parent's id should be used when unavailable.
+ *
+ * @param mediaId The id of the item to retrieve.
+ * @param cb The callback to receive the result on.
+ */
+ public void getMediaItem(@NonNull String mediaId, @NonNull final MediaItemCallback cb) {
+ if (TextUtils.isEmpty(mediaId)) {
+ throw new IllegalArgumentException("mediaId is empty.");
+ }
+ if (cb == null) {
+ throw new IllegalArgumentException("cb is null.");
+ }
+ if (mState != CONNECT_STATE_CONNECTED) {
+ Log.i(TAG, "Not connected, unable to retrieve the MediaItem.");
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ cb.onError();
+ }
+ });
+ return;
+ }
+ ResultReceiver receiver = new ResultReceiver(mHandler) {
+ @Override
+ protected void onReceiveResult(int resultCode, Bundle resultData) {
+ if (resultCode != 0 || resultData == null
+ || !resultData.containsKey(MediaBrowserService.KEY_MEDIA_ITEM)) {
+ cb.onError();
+ return;
+ }
+ Parcelable item = resultData.getParcelable(MediaBrowserService.KEY_MEDIA_ITEM);
+ if (!(item instanceof MediaItem)) {
+ cb.onError();
+ return;
+ }
+ cb.onMediaItemLoaded((MediaItem) item);
+ }
+ };
+ try {
+ mServiceBinder.getMediaItem(mediaId, receiver);
+ } catch (RemoteException e) {
+ Log.i(TAG, "Remote error getting media item.");
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ cb.onError();
+ }
+ });
+ }
+ }
+
+ /**
* For debugging.
*/
private static String getStateLabel(int state) {
@@ -688,6 +745,27 @@ public final class MediaBrowser {
}
/**
+ * Callback for receiving the result of {@link #getMediaItem}.
+ */
+ public static abstract class MediaItemCallback {
+
+ /**
+ * Called when the item has been returned by the browser service.
+ *
+ * @param item The item that was returned or null if it doesn't exist.
+ */
+ public void onMediaItemLoaded(MediaItem item) {
+ }
+
+ /**
+ * Called when the id doesn't exist or there was an error retrieving the
+ * item.
+ */
+ public void onError() {
+ }
+ }
+
+ /**
* ServiceConnection to the other app.
*/
private class MediaServiceConnection implements ServiceConnection {
diff --git a/media/java/android/media/midi/IMidiDeviceListener.aidl b/media/java/android/media/midi/IMidiDeviceListener.aidl
new file mode 100644
index 0000000..31c66e3
--- /dev/null
+++ b/media/java/android/media/midi/IMidiDeviceListener.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.media.midi.MidiDeviceInfo;
+import android.media.midi.MidiDeviceStatus;
+
+/** @hide */
+oneway interface IMidiDeviceListener
+{
+ void onDeviceAdded(in MidiDeviceInfo device);
+ void onDeviceRemoved(in MidiDeviceInfo device);
+ void onDeviceStatusChanged(in MidiDeviceStatus status);
+}
diff --git a/media/java/android/media/midi/IMidiDeviceServer.aidl b/media/java/android/media/midi/IMidiDeviceServer.aidl
new file mode 100644
index 0000000..96d12fd
--- /dev/null
+++ b/media/java/android/media/midi/IMidiDeviceServer.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.media.midi.MidiDeviceInfo;
+import android.os.ParcelFileDescriptor;
+
+/** @hide */
+interface IMidiDeviceServer
+{
+ ParcelFileDescriptor openInputPort(IBinder token, int portNumber);
+ ParcelFileDescriptor openOutputPort(IBinder token, int portNumber);
+ void closePort(IBinder token);
+
+ // connects the input port pfd to the specified output port
+ void connectPorts(IBinder token, in ParcelFileDescriptor pfd, int outputPortNumber);
+
+ MidiDeviceInfo getDeviceInfo();
+}
diff --git a/media/java/android/media/midi/IMidiManager.aidl b/media/java/android/media/midi/IMidiManager.aidl
new file mode 100644
index 0000000..fcd4aff
--- /dev/null
+++ b/media/java/android/media/midi/IMidiManager.aidl
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.media.midi.IMidiDeviceListener;
+import android.media.midi.IMidiDeviceServer;
+import android.media.midi.MidiDeviceInfo;
+import android.media.midi.MidiDeviceStatus;
+import android.os.Bundle;
+import android.os.IBinder;
+
+/** @hide */
+interface IMidiManager
+{
+ MidiDeviceInfo[] getDevices();
+
+ // for device creation & removal notifications
+ void registerListener(IBinder token, in IMidiDeviceListener listener);
+ void unregisterListener(IBinder token, in IMidiDeviceListener listener);
+
+ // for opening built-in MIDI devices
+ IMidiDeviceServer openDevice(IBinder token, in MidiDeviceInfo device);
+
+ // for registering built-in MIDI devices
+ MidiDeviceInfo registerDeviceServer(in IMidiDeviceServer server, int numInputPorts,
+ int numOutputPorts, in String[] inputPortNames, in String[] outputPortNames,
+ in Bundle properties, int type);
+
+ // for unregistering built-in MIDI devices
+ void unregisterDeviceServer(in IMidiDeviceServer server);
+
+ // used by MidiDeviceService to access the MidiDeviceInfo that was created based on its
+ // manifest's meta-data
+ MidiDeviceInfo getServiceDeviceInfo(String packageName, String className);
+
+ // used for client's to retrieve a device's MidiDeviceStatus
+ MidiDeviceStatus getDeviceStatus(in MidiDeviceInfo deviceInfo);
+
+ // used by MIDI devices to report their status
+ // the token is used by MidiService for death notification
+ void setDeviceStatus(IBinder token, in MidiDeviceStatus status);
+}
diff --git a/media/java/android/media/midi/MidiDevice.java b/media/java/android/media/midi/MidiDevice.java
new file mode 100644
index 0000000..6526adc
--- /dev/null
+++ b/media/java/android/media/midi/MidiDevice.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.content.Context;
+import android.content.ServiceConnection;
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.ParcelFileDescriptor;
+import android.os.RemoteException;
+import android.util.Log;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * This class is used for sending and receiving data to and from a MIDI device
+ * Instances of this class are created by {@link MidiManager#openDevice}.
+ */
+public final class MidiDevice implements Closeable {
+ private static final String TAG = "MidiDevice";
+
+ private final MidiDeviceInfo mDeviceInfo;
+ private final IMidiDeviceServer mDeviceServer;
+ private Context mContext;
+ private ServiceConnection mServiceConnection;
+
+
+ private final CloseGuard mGuard = CloseGuard.get();
+
+ /**
+ * This class represents a connection between the output port of one device
+ * and the input port of another. Created by {@link #connectPorts}.
+ * Close this object to terminate the connection.
+ */
+ public class MidiConnection implements Closeable {
+ private final IBinder mToken;
+ private final MidiInputPort mInputPort;
+
+ MidiConnection(IBinder token, MidiInputPort inputPort) {
+ mToken = token;
+ mInputPort = inputPort;
+ }
+
+ @Override
+ public void close() throws IOException {
+ try {
+ mDeviceServer.closePort(mToken);
+ IoUtils.closeQuietly(mInputPort);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in MidiConnection.close");
+ }
+ }
+ }
+
+ /* package */ MidiDevice(MidiDeviceInfo deviceInfo, IMidiDeviceServer server) {
+ this(deviceInfo, server, null, null);
+ }
+
+ /* package */ MidiDevice(MidiDeviceInfo deviceInfo, IMidiDeviceServer server,
+ Context context, ServiceConnection serviceConnection) {
+ mDeviceInfo = deviceInfo;
+ mDeviceServer = server;
+ mContext = context;
+ mServiceConnection = serviceConnection;
+ mGuard.open("close");
+ }
+
+ /**
+ * Returns a {@link MidiDeviceInfo} object, which describes this device.
+ *
+ * @return the {@link MidiDeviceInfo} object
+ */
+ public MidiDeviceInfo getInfo() {
+ return mDeviceInfo;
+ }
+
+ /**
+ * Called to open a {@link MidiInputPort} for the specified port number.
+ *
+ * @param portNumber the number of the input port to open
+ * @return the {@link MidiInputPort}
+ */
+ public MidiInputPort openInputPort(int portNumber) {
+ try {
+ IBinder token = new Binder();
+ ParcelFileDescriptor pfd = mDeviceServer.openInputPort(token, portNumber);
+ if (pfd == null) {
+ return null;
+ }
+ return new MidiInputPort(mDeviceServer, token, pfd, portNumber);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in openInputPort");
+ return null;
+ }
+ }
+
+ /**
+ * Called to open a {@link MidiOutputPort} for the specified port number.
+ *
+ * @param portNumber the number of the output port to open
+ * @return the {@link MidiOutputPort}
+ */
+ public MidiOutputPort openOutputPort(int portNumber) {
+ try {
+ IBinder token = new Binder();
+ ParcelFileDescriptor pfd = mDeviceServer.openOutputPort(token, portNumber);
+ if (pfd == null) {
+ return null;
+ }
+ return new MidiOutputPort(mDeviceServer, token, pfd, portNumber);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in openOutputPort");
+ return null;
+ }
+ }
+
+ /**
+ * Connects the supplied {@link MidiInputPort} to the output port of this device
+ * with the specified port number. Once the connection is made, the MidiInput port instance
+ * can no longer receive data via its {@link MidiReceiver#onSend} method.
+ * This method returns a {@link MidiDevice.MidiConnection} object, which can be used
+ * to close the connection.
+ *
+ * @param inputPort the inputPort to connect
+ * @param outputPortNumber the port number of the output port to connect inputPort to.
+ * @return {@link MidiDevice.MidiConnection} object if the connection is successful,
+ * or null in case of failure.
+ */
+ public MidiConnection connectPorts(MidiInputPort inputPort, int outputPortNumber) {
+ if (outputPortNumber < 0 || outputPortNumber >= mDeviceInfo.getOutputPortCount()) {
+ throw new IllegalArgumentException("outputPortNumber out of range");
+ }
+
+ ParcelFileDescriptor pfd = inputPort.claimFileDescriptor();
+ if (pfd == null) {
+ return null;
+ }
+ try {
+ IBinder token = new Binder();
+ mDeviceServer.connectPorts(token, pfd, outputPortNumber);
+ // close our copy of the file descriptor
+ IoUtils.closeQuietly(pfd);
+ return new MidiConnection(token, inputPort);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in connectPorts");
+ return null;
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ synchronized (mGuard) {
+ mGuard.close();
+ if (mContext != null && mServiceConnection != null) {
+ mContext.unbindService(mServiceConnection);
+ mContext = null;
+ mServiceConnection = null;
+ }
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ mGuard.warnIfOpen();
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return ("MidiDevice: " + mDeviceInfo.toString());
+ }
+}
diff --git a/media/java/android/media/midi/MidiDeviceInfo.aidl b/media/java/android/media/midi/MidiDeviceInfo.aidl
new file mode 100644
index 0000000..f2f37a2
--- /dev/null
+++ b/media/java/android/media/midi/MidiDeviceInfo.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+parcelable MidiDeviceInfo;
diff --git a/media/java/android/media/midi/MidiDeviceInfo.java b/media/java/android/media/midi/MidiDeviceInfo.java
new file mode 100644
index 0000000..a59be54
--- /dev/null
+++ b/media/java/android/media/midi/MidiDeviceInfo.java
@@ -0,0 +1,355 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * This class contains information to describe a MIDI device.
+ * For now we only have information that can be retrieved easily for USB devices,
+ * but we will probably expand this in the future.
+ *
+ * This class is just an immutable object to encapsulate the MIDI device description.
+ * Use the MidiDevice class to actually communicate with devices.
+ */
+public final class MidiDeviceInfo implements Parcelable {
+
+ private static final String TAG = "MidiDeviceInfo";
+
+ /**
+ * Constant representing USB MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_USB = 1;
+
+ /**
+ * Constant representing virtual (software based) MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_VIRTUAL = 2;
+
+ /**
+ * Constant representing Bluetooth MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_BLUETOOTH = 3;
+
+ /**
+ * Bundle key for the device's user visible name property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}.
+ * For USB devices, this is a concatenation of the manufacturer and product names.
+ */
+ public static final String PROPERTY_NAME = "name";
+
+ /**
+ * Bundle key for the device's manufacturer name property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}.
+ * Matches the USB device manufacturer name string for USB MIDI devices.
+ */
+ public static final String PROPERTY_MANUFACTURER = "manufacturer";
+
+ /**
+ * Bundle key for the device's product name property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ * Matches the USB device product name string for USB MIDI devices.
+ */
+ public static final String PROPERTY_PRODUCT = "product";
+
+ /**
+ * Bundle key for the device's version property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ * Matches the USB device version number for USB MIDI devices.
+ */
+ public static final String PROPERTY_VERSION = "version";
+
+ /**
+ * Bundle key for the device's serial number property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ * Matches the USB device serial number for USB MIDI devices.
+ */
+ public static final String PROPERTY_SERIAL_NUMBER = "serial_number";
+
+ /**
+ * Bundle key for the device's corresponding USB device.
+ * The value for this property is of type {@link android.hardware.usb.UsbDevice}.
+ * Only set for USB MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ */
+ public static final String PROPERTY_USB_DEVICE = "usb_device";
+
+ /**
+ * Bundle key for the device's corresponding Bluetooth device.
+ * The value for this property is of type {@link android.bluetooth.BluetoothDevice}.
+ * Only set for Bluetooth MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ */
+ public static final String PROPERTY_BLUETOOTH_DEVICE = "bluetooth_device";
+
+ /**
+ * Bundle key for the device's ALSA card number.
+ * The value for this property is an integer.
+ * Only set for USB MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ *
+ * @hide
+ */
+ public static final String PROPERTY_ALSA_CARD = "alsa_card";
+
+ /**
+ * Bundle key for the device's ALSA device number.
+ * The value for this property is an integer.
+ * Only set for USB MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ *
+ * @hide
+ */
+ public static final String PROPERTY_ALSA_DEVICE = "alsa_device";
+
+ /**
+ * ServiceInfo for the service hosting the device implementation.
+ * The value for this property is of type {@link android.content.pm.ServiceInfo}.
+ * Only set for Virtual MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ *
+ * @hide
+ */
+ public static final String PROPERTY_SERVICE_INFO = "service_info";
+
+ /**
+ * Contains information about an input or output port.
+ */
+ public static final class PortInfo {
+ /**
+ * Port type for input ports
+ */
+ public static final int TYPE_INPUT = 1;
+
+ /**
+ * Port type for output ports
+ */
+ public static final int TYPE_OUTPUT = 2;
+
+ private final int mPortType;
+ private final int mPortNumber;
+ private final String mName;
+
+ PortInfo(int type, int portNumber, String name) {
+ mPortType = type;
+ mPortNumber = portNumber;
+ mName = (name == null ? "" : name);
+ }
+
+ /**
+ * Returns the port type of the port (either {@link #TYPE_INPUT} or {@link #TYPE_OUTPUT})
+ * @return the port type
+ */
+ public int getType() {
+ return mPortType;
+ }
+
+ /**
+ * Returns the port number of the port
+ * @return the port number
+ */
+ public int getPortNumber() {
+ return mPortNumber;
+ }
+
+ /**
+ * Returns the name of the port, or empty string if the port has no name
+ * @return the port name
+ */
+ public String getName() {
+ return mName;
+ }
+ }
+
+ private final int mType; // USB or virtual
+ private final int mId; // unique ID generated by MidiService
+ private final int mInputPortCount;
+ private final int mOutputPortCount;
+ private final String[] mInputPortNames;
+ private final String[] mOutputPortNames;
+ private final Bundle mProperties;
+ private final boolean mIsPrivate;
+
+ /**
+ * MidiDeviceInfo should only be instantiated by MidiService implementation
+ * @hide
+ */
+ public MidiDeviceInfo(int type, int id, int numInputPorts, int numOutputPorts,
+ String[] inputPortNames, String[] outputPortNames, Bundle properties,
+ boolean isPrivate) {
+ mType = type;
+ mId = id;
+ mInputPortCount = numInputPorts;
+ mOutputPortCount = numOutputPorts;
+ if (inputPortNames == null) {
+ mInputPortNames = new String[numInputPorts];
+ } else {
+ mInputPortNames = inputPortNames;
+ }
+ if (outputPortNames == null) {
+ mOutputPortNames = new String[numOutputPorts];
+ } else {
+ mOutputPortNames = outputPortNames;
+ }
+ mProperties = properties;
+ mIsPrivate = isPrivate;
+ }
+
+ /**
+ * Returns the type of the device.
+ *
+ * @return the device's type
+ */
+ public int getType() {
+ return mType;
+ }
+
+ /**
+ * Returns the ID of the device.
+ * This ID is generated by the MIDI service and is not persistent across device unplugs.
+ *
+ * @return the device's ID
+ */
+ public int getId() {
+ return mId;
+ }
+
+ /**
+ * Returns the device's number of input ports.
+ *
+ * @return the number of input ports
+ */
+ public int getInputPortCount() {
+ return mInputPortCount;
+ }
+
+ /**
+ * Returns the device's number of output ports.
+ *
+ * @return the number of output ports
+ */
+ public int getOutputPortCount() {
+ return mOutputPortCount;
+ }
+
+ /**
+ * Returns information about the device's ports.
+ * The ports are in unspecified order.
+ *
+ * @return array of {@link PortInfo}
+ */
+ public PortInfo[] getPorts() {
+ PortInfo[] ports = new PortInfo[mInputPortCount + mOutputPortCount];
+
+ int index = 0;
+ for (int i = 0; i < mInputPortCount; i++) {
+ ports[index++] = new PortInfo(PortInfo.TYPE_INPUT, i, mInputPortNames[i]);
+ }
+ for (int i = 0; i < mOutputPortCount; i++) {
+ ports[index++] = new PortInfo(PortInfo.TYPE_OUTPUT, i, mOutputPortNames[i]);
+ }
+
+ return ports;
+ }
+
+ /**
+ * Returns the {@link android.os.Bundle} containing the device's properties.
+ *
+ * @return the device's properties
+ */
+ public Bundle getProperties() {
+ return mProperties;
+ }
+
+ /**
+ * Returns true if the device is private. Private devices are only visible and accessible
+ * to clients with the same UID as the application that is hosting the device.
+ *
+ * @return true if the device is private
+ */
+ public boolean isPrivate() {
+ return mIsPrivate;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof MidiDeviceInfo) {
+ return (((MidiDeviceInfo)o).mId == mId);
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return mId;
+ }
+
+ @Override
+ public String toString() {
+ // This is a hack to force the mProperties Bundle to unparcel so we can
+ // print all the names and values.
+ mProperties.getString(PROPERTY_NAME);
+ return ("MidiDeviceInfo[mType=" + mType +
+ ",mInputPortCount=" + mInputPortCount +
+ ",mOutputPortCount=" + mOutputPortCount +
+ ",mProperties=" + mProperties +
+ ",mIsPrivate=" + mIsPrivate);
+ }
+
+ public static final Parcelable.Creator<MidiDeviceInfo> CREATOR =
+ new Parcelable.Creator<MidiDeviceInfo>() {
+ public MidiDeviceInfo createFromParcel(Parcel in) {
+ int type = in.readInt();
+ int id = in.readInt();
+ int inputPorts = in.readInt();
+ int outputPorts = in.readInt();
+ String[] inputPortNames = in.createStringArray();
+ String[] outputPortNames = in.createStringArray();
+ Bundle properties = in.readBundle();
+ boolean isPrivate = (in.readInt() == 1);
+ return new MidiDeviceInfo(type, id, inputPorts, outputPorts,
+ inputPortNames, outputPortNames, properties, isPrivate);
+ }
+
+ public MidiDeviceInfo[] newArray(int size) {
+ return new MidiDeviceInfo[size];
+ }
+ };
+
+ public int describeContents() {
+ return 0;
+ }
+
+ public void writeToParcel(Parcel parcel, int flags) {
+ parcel.writeInt(mType);
+ parcel.writeInt(mId);
+ parcel.writeInt(mInputPortCount);
+ parcel.writeInt(mOutputPortCount);
+ parcel.writeStringArray(mInputPortNames);
+ parcel.writeStringArray(mOutputPortNames);
+ parcel.writeBundle(mProperties);
+ parcel.writeInt(mIsPrivate ? 1 : 0);
+ }
+}
diff --git a/media/java/android/media/midi/MidiDeviceServer.java b/media/java/android/media/midi/MidiDeviceServer.java
new file mode 100644
index 0000000..a316a44
--- /dev/null
+++ b/media/java/android/media/midi/MidiDeviceServer.java
@@ -0,0 +1,362 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.ParcelFileDescriptor;
+import android.os.Process;
+import android.os.RemoteException;
+import android.system.OsConstants;
+import android.util.Log;
+
+import com.android.internal.midi.MidiDispatcher;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+
/**
 * Internal class used for providing an implementation for a MIDI device.
 *
 * Manages client connections to the device's input and output ports: each open
 * port is tracked by a PortClient keyed on the client's Binder token, and data
 * flows over socket-pair ParcelFileDescriptors handed to the client.
 *
 * @hide
 */
public final class MidiDeviceServer implements Closeable {
    private static final String TAG = "MidiDeviceServer";

    private final IMidiManager mMidiManager;

    // MidiDeviceInfo for the device implemented by this server
    // (set exactly once via setDeviceInfo() after registration)
    private MidiDeviceInfo mDeviceInfo;
    private final int mInputPortCount;
    private final int mOutputPortCount;

    // MidiReceivers for receiving data on our input ports
    private final MidiReceiver[] mInputPortReceivers;

    // MidiDispatchers for sending data on our output ports
    private MidiDispatcher[] mOutputPortDispatchers;

    // MidiOutputPorts for clients connected to our input ports
    // (indexed by input port number; also serves as the lock for input-port state)
    private final MidiOutputPort[] mInputPortOutputPorts;

    // List of all MidiInputPorts we created
    private final CopyOnWriteArrayList<MidiInputPort> mInputPorts
            = new CopyOnWriteArrayList<MidiInputPort>();


    // for reporting device status
    private final IBinder mDeviceStatusToken = new Binder();
    private final boolean[] mInputPortOpen;
    private final int[] mOutputPortOpenCount;

    // warns if the owner forgets to call close()
    private final CloseGuard mGuard = CloseGuard.get();
    private boolean mIsClosed;

    private final Callback mCallback;

    public interface Callback {
        /**
         * Called to notify when our device status has changed
         * @param server the {@link MidiDeviceServer} that changed
         * @param status the {@link MidiDeviceStatus} for the device
         */
        public void onDeviceStatusChanged(MidiDeviceServer server, MidiDeviceStatus status);
    }

    // Base class for per-client port bookkeeping. Registers for Binder death so the
    // port is cleaned up automatically if the client process dies.
    abstract private class PortClient implements IBinder.DeathRecipient {
        final IBinder mToken;

        PortClient(IBinder token) {
            mToken = token;

            try {
                token.linkToDeath(this, 0);
            } catch (RemoteException e) {
                // client already died before we could link; release immediately
                close();
            }
        }

        abstract void close();

        @Override
        public void binderDied() {
            close();
        }
    }

    // Tracks a client connected to one of our input ports (we hold the MidiOutputPort
    // that feeds data from the client into our input-port receiver).
    private class InputPortClient extends PortClient {
        private final MidiOutputPort mOutputPort;

        InputPortClient(IBinder token, MidiOutputPort outputPort) {
            super(token);
            mOutputPort = outputPort;
        }

        @Override
        void close() {
            mToken.unlinkToDeath(this, 0);
            synchronized (mInputPortOutputPorts) {
                int portNumber = mOutputPort.getPortNumber();
                mInputPortOutputPorts[portNumber] = null;
                mInputPortOpen[portNumber] = false;
                updateDeviceStatus();
            }
            IoUtils.closeQuietly(mOutputPort);
        }
    }

    // Tracks a client connected to one of our output ports (we hold the MidiInputPort
    // that carries data from our dispatcher out to the client).
    private class OutputPortClient extends PortClient {
        private final MidiInputPort mInputPort;

        OutputPortClient(IBinder token, MidiInputPort inputPort) {
            super(token);
            mInputPort = inputPort;
        }

        @Override
        void close() {
            mToken.unlinkToDeath(this, 0);
            int portNumber = mInputPort.getPortNumber();
            MidiDispatcher dispatcher = mOutputPortDispatchers[portNumber];
            synchronized (dispatcher) {
                dispatcher.getSender().disconnect(mInputPort);
                int openCount = dispatcher.getReceiverCount();
                mOutputPortOpenCount[portNumber] = openCount;
                updateDeviceStatus();
            }

            mInputPorts.remove(mInputPort);
            IoUtils.closeQuietly(mInputPort);
        }
    }

    // all open ports, keyed by the client's Binder token; guarded by itself
    private final HashMap<IBinder, PortClient> mPortClients = new HashMap<IBinder, PortClient>();

    // Binder interface stub for receiving connection requests from clients
    private final IMidiDeviceServer mServer = new IMidiDeviceServer.Stub() {

        @Override
        public ParcelFileDescriptor openInputPort(IBinder token, int portNumber) {
            // private devices may only be used from the hosting app's own UID
            if (mDeviceInfo.isPrivate()) {
                if (Binder.getCallingUid() != Process.myUid()) {
                    throw new SecurityException("Can't access private device from different UID");
                }
            }

            if (portNumber < 0 || portNumber >= mInputPortCount) {
                Log.e(TAG, "portNumber out of range in openInputPort: " + portNumber);
                return null;
            }

            synchronized (mInputPortOutputPorts) {
                // input ports are exclusive: only one client at a time
                if (mInputPortOutputPorts[portNumber] != null) {
                    Log.d(TAG, "port " + portNumber + " already open");
                    return null;
                }

                try {
                    // one end stays here feeding our receiver, the other is returned
                    // to the client
                    ParcelFileDescriptor[] pair = ParcelFileDescriptor.createSocketPair(
                                                        OsConstants.SOCK_SEQPACKET);
                    MidiOutputPort outputPort = new MidiOutputPort(pair[0], portNumber);
                    mInputPortOutputPorts[portNumber] = outputPort;
                    outputPort.connect(mInputPortReceivers[portNumber]);
                    InputPortClient client = new InputPortClient(token, outputPort);
                    synchronized (mPortClients) {
                        mPortClients.put(token, client);
                    }
                    mInputPortOpen[portNumber] = true;
                    updateDeviceStatus();
                    return pair[1];
                } catch (IOException e) {
                    Log.e(TAG, "unable to create ParcelFileDescriptors in openInputPort");
                    return null;
                }
            }
        }

        @Override
        public ParcelFileDescriptor openOutputPort(IBinder token, int portNumber) {
            // private devices may only be used from the hosting app's own UID
            if (mDeviceInfo.isPrivate()) {
                if (Binder.getCallingUid() != Process.myUid()) {
                    throw new SecurityException("Can't access private device from different UID");
                }
            }

            if (portNumber < 0 || portNumber >= mOutputPortCount) {
                Log.e(TAG, "portNumber out of range in openOutputPort: " + portNumber);
                return null;
            }

            try {
                // output ports are shared: each client gets its own MidiInputPort
                // connected to the port's dispatcher
                ParcelFileDescriptor[] pair = ParcelFileDescriptor.createSocketPair(
                                                    OsConstants.SOCK_SEQPACKET);
                MidiInputPort inputPort = new MidiInputPort(pair[0], portNumber);
                MidiDispatcher dispatcher = mOutputPortDispatchers[portNumber];
                synchronized (dispatcher) {
                    dispatcher.getSender().connect(inputPort);
                    int openCount = dispatcher.getReceiverCount();
                    mOutputPortOpenCount[portNumber] = openCount;
                    updateDeviceStatus();
                }

                mInputPorts.add(inputPort);
                OutputPortClient client = new OutputPortClient(token, inputPort);
                synchronized (mPortClients) {
                    mPortClients.put(token, client);
                }
                return pair[1];
            } catch (IOException e) {
                Log.e(TAG, "unable to create ParcelFileDescriptors in openOutputPort");
                return null;
            }
        }

        @Override
        public void closePort(IBinder token) {
            synchronized (mPortClients) {
                PortClient client = mPortClients.remove(token);
                if (client != null) {
                    client.close();
                }
            }
        }

        @Override
        public void connectPorts(IBinder token, ParcelFileDescriptor pfd,
                int outputPortNumber) {
            // NOTE(review): unlike openOutputPort, this performs no range check on
            // outputPortNumber and no private-device UID check — presumably callers
            // are trusted; verify against MidiDevice.connectPorts which validates
            // the range on the client side.
            MidiInputPort inputPort = new MidiInputPort(pfd, outputPortNumber);
            mOutputPortDispatchers[outputPortNumber].getSender().connect(inputPort);
            mInputPorts.add(inputPort);
            OutputPortClient client = new OutputPortClient(token, inputPort);
            synchronized (mPortClients) {
                mPortClients.put(token, client);
            }
        }

        @Override
        public MidiDeviceInfo getDeviceInfo() {
            return mDeviceInfo;
        }
    };

    /* package */ MidiDeviceServer(IMidiManager midiManager, MidiReceiver[] inputPortReceivers,
            int numOutputPorts, Callback callback) {
        mMidiManager = midiManager;
        mInputPortReceivers = inputPortReceivers;
        mInputPortCount = inputPortReceivers.length;
        mOutputPortCount = numOutputPorts;
        mCallback = callback;

        mInputPortOutputPorts = new MidiOutputPort[mInputPortCount];

        mOutputPortDispatchers = new MidiDispatcher[numOutputPorts];
        for (int i = 0; i < numOutputPorts; i++) {
            mOutputPortDispatchers[i] = new MidiDispatcher();
        }

        mInputPortOpen = new boolean[mInputPortCount];
        mOutputPortOpenCount = new int[numOutputPorts];

        mGuard.open("close");
    }

    /* package */ IMidiDeviceServer getBinderInterface() {
        return mServer;
    }

    public IBinder asBinder() {
        return mServer.asBinder();
    }

    // Records the MidiDeviceInfo assigned by MidiService; may only be called once.
    /* package */ void setDeviceInfo(MidiDeviceInfo deviceInfo) {
        if (mDeviceInfo != null) {
            throw new IllegalStateException("setDeviceInfo should only be called once");
        }
        mDeviceInfo = deviceInfo;
    }

    // Builds a MidiDeviceStatus snapshot and reports it to the callback and MidiService.
    private void updateDeviceStatus() {
        // clear calling identity, since we may be in a Binder call from one of our clients
        long identityToken = Binder.clearCallingIdentity();

        MidiDeviceStatus status = new MidiDeviceStatus(mDeviceInfo, mInputPortOpen,
                mOutputPortOpenCount);
        if (mCallback != null) {
            mCallback.onDeviceStatusChanged(this, status);
        }
        try {
            mMidiManager.setDeviceStatus(mDeviceStatusToken, status);
        } catch (RemoteException e) {
            Log.e(TAG, "RemoteException in updateDeviceStatus");
        } finally {
            Binder.restoreCallingIdentity(identityToken);
        }
    }

    @Override
    public void close() throws IOException {
        synchronized (mGuard) {
            if (mIsClosed) return;  // idempotent
            mGuard.close();

            // release every port still connected to a client, then unregister
            for (int i = 0; i < mInputPortCount; i++) {
                MidiOutputPort outputPort = mInputPortOutputPorts[i];
                if (outputPort != null) {
                    IoUtils.closeQuietly(outputPort);
                    mInputPortOutputPorts[i] = null;
                }
            }
            for (MidiInputPort inputPort : mInputPorts) {
                IoUtils.closeQuietly(inputPort);
            }
            mInputPorts.clear();
            try {
                mMidiManager.unregisterDeviceServer(mServer);
            } catch (RemoteException e) {
                Log.e(TAG, "RemoteException in unregisterDeviceServer");
            }
            mIsClosed = true;
        }
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            mGuard.warnIfOpen();
            close();
        } finally {
            super.finalize();
        }
    }

    /**
     * Returns an array of {@link MidiReceiver} for the device's output ports.
     * Clients can use these receivers to send data out the device's output ports.
     * @return array of MidiReceivers
     */
    public MidiReceiver[] getOutputPortReceivers() {
        MidiReceiver[] receivers = new MidiReceiver[mOutputPortCount];
        System.arraycopy(mOutputPortDispatchers, 0, receivers, 0, mOutputPortCount);
        return receivers;
    }
}
diff --git a/media/java/android/media/midi/MidiDeviceService.java b/media/java/android/media/midi/MidiDeviceService.java
new file mode 100644
index 0000000..ce12a4f
--- /dev/null
+++ b/media/java/android/media/midi/MidiDeviceService.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.Log;
+
+/**
+ * A service that implements a virtual MIDI device.
+ * Subclasses must implement the {@link #onGetInputPortReceivers} method to provide a
+ * list of {@link MidiReceiver}s to receive data sent to the device's input ports.
+ * Similarly, subclasses can call {@link #getOutputPortReceivers} to fetch a list
+ * of {@link MidiReceiver}s for sending data out the output ports.
+ *
+ * <p>To extend this class, you must declare the service in your manifest file with
+ * an intent filter with the {@link #SERVICE_INTERFACE} action
+ * and meta-data to describe the virtual device.
+ For example:</p>
+ * <pre>
+ * &lt;service android:name=".VirtualDeviceService"
+ * android:label="&#64;string/service_name">
+ * &lt;intent-filter>
+ * &lt;action android:name="android.media.midi.MidiDeviceService" />
+ * &lt;/intent-filter>
+ * &lt;meta-data android:name="android.media.midi.MidiDeviceService"
+ android:resource="@xml/device_info" />
+ * &lt;/service></pre>
+ */
+abstract public class MidiDeviceService extends Service {
+ private static final String TAG = "MidiDeviceService";
+
+ public static final String SERVICE_INTERFACE = "android.media.midi.MidiDeviceService";
+
+ private IMidiManager mMidiManager;
+ private MidiDeviceServer mServer;
+ private MidiDeviceInfo mDeviceInfo;
+
+ private final MidiDeviceServer.Callback mCallback = new MidiDeviceServer.Callback() {
+ @Override
+ public void onDeviceStatusChanged(MidiDeviceServer server, MidiDeviceStatus status) {
+ MidiDeviceService.this.onDeviceStatusChanged(status);
+ }
+ };
+
+ @Override
+ public void onCreate() {
+ mMidiManager = IMidiManager.Stub.asInterface(
+ ServiceManager.getService(Context.MIDI_SERVICE));
+ MidiDeviceServer server;
+ try {
+ MidiDeviceInfo deviceInfo = mMidiManager.getServiceDeviceInfo(getPackageName(),
+ this.getClass().getName());
+ if (deviceInfo == null) {
+ Log.e(TAG, "Could not find MidiDeviceInfo for MidiDeviceService " + this);
+ return;
+ }
+ mDeviceInfo = deviceInfo;
+ MidiReceiver[] inputPortReceivers = onGetInputPortReceivers();
+ if (inputPortReceivers == null) {
+ inputPortReceivers = new MidiReceiver[0];
+ }
+ server = new MidiDeviceServer(mMidiManager, inputPortReceivers,
+ deviceInfo.getOutputPortCount(), mCallback);
+ server.setDeviceInfo(deviceInfo);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in IMidiManager.getServiceDeviceInfo");
+ server = null;
+ }
+ mServer = server;
+ }
+
+ /**
+ * Returns an array of {@link MidiReceiver} for the device's input ports.
+ * Subclasses must override this to provide the receivers which will receive
+ * data sent to the device's input ports. An empty array should be returned if
+ * the device has no input ports.
+ * @return array of MidiReceivers
+ */
+ abstract public MidiReceiver[] onGetInputPortReceivers();
+
+ /**
+ * Returns an array of {@link MidiReceiver} for the device's output ports.
+ * These can be used to send data out the device's output ports.
+ * @return array of MidiReceivers
+ */
+ public final MidiReceiver[] getOutputPortReceivers() {
+ if (mServer == null) {
+ return null;
+ } else {
+ return mServer.getOutputPortReceivers();
+ }
+ }
+
+ /**
+ * returns the {@link MidiDeviceInfo} instance for this service
+ * @return our MidiDeviceInfo
+ */
+ public final MidiDeviceInfo getDeviceInfo() {
+ return mDeviceInfo;
+ }
+
+ /**
+ * Called to notify when an our {@link MidiDeviceStatus} has changed
+ * @param status the number of the port that was opened
+ */
+ public void onDeviceStatusChanged(MidiDeviceStatus status) {
+ }
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ if (SERVICE_INTERFACE.equals(intent.getAction()) && mServer != null) {
+ return mServer.getBinderInterface().asBinder();
+ } else {
+ return null;
+ }
+ }
+}
diff --git a/media/java/android/media/midi/MidiDeviceStatus.aidl b/media/java/android/media/midi/MidiDeviceStatus.aidl
new file mode 100644
index 0000000..1a848c0
--- /dev/null
+++ b/media/java/android/media/midi/MidiDeviceStatus.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+parcelable MidiDeviceStatus;
diff --git a/media/java/android/media/midi/MidiDeviceStatus.java b/media/java/android/media/midi/MidiDeviceStatus.java
new file mode 100644
index 0000000..acb54de
--- /dev/null
+++ b/media/java/android/media/midi/MidiDeviceStatus.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * This is an immutable class that describes the current status of a MIDI device's ports.
+ */
+public final class MidiDeviceStatus implements Parcelable {
+
+ private static final String TAG = "MidiDeviceStatus";
+
+ private final MidiDeviceInfo mDeviceInfo;
+ // true if input ports are open
+ private final boolean mInputPortOpen[];
+ // open counts for output ports
+ private final int mOutputPortOpenCount[];
+
+ /**
+ * @hide
+ */
+ public MidiDeviceStatus(MidiDeviceInfo deviceInfo, boolean inputPortOpen[],
+ int outputPortOpenCount[]) {
+ // MidiDeviceInfo is immutable so we can share references
+ mDeviceInfo = deviceInfo;
+
+ // make copies of the arrays
+ mInputPortOpen = new boolean[inputPortOpen.length];
+ System.arraycopy(inputPortOpen, 0, mInputPortOpen, 0, inputPortOpen.length);
+ mOutputPortOpenCount = new int[outputPortOpenCount.length];
+ System.arraycopy(outputPortOpenCount, 0, mOutputPortOpenCount, 0,
+ outputPortOpenCount.length);
+ }
+
+ /**
+ * Creates a MidiDeviceStatus with zero for all port open counts
+ * @hide
+ */
+ public MidiDeviceStatus(MidiDeviceInfo deviceInfo) {
+ mDeviceInfo = deviceInfo;
+ mInputPortOpen = new boolean[deviceInfo.getInputPortCount()];
+ mOutputPortOpenCount = new int[deviceInfo.getOutputPortCount()];
+ }
+
+ /**
+ * Returns the {@link MidiDeviceInfo} of the device.
+ *
+ * @return the device info
+ */
+ public MidiDeviceInfo getDeviceInfo() {
+ return mDeviceInfo;
+ }
+
+ /**
+ * Returns true if an input port is open.
+ * An input port can only be opened by one client at a time.
+ *
+ * @param portNumber the input port's port number
+ * @return input port open status
+ */
+ public boolean isInputPortOpen(int portNumber) {
+ return mInputPortOpen[portNumber];
+ }
+
+ /**
+ * Returns the number of clients currently connected to the specified output port.
+ * Unlike input ports, an output port can be opened by multiple clients at the same time.
+ *
+ * @param portNumber the output port's port number
+ * @return output port open count
+ */
+ public int getOutputPortOpenCount(int portNumber) {
+ return mOutputPortOpenCount[portNumber];
+ }
+
+ @Override
+ public String toString() {
+ int inputPortCount = mDeviceInfo.getInputPortCount();
+ int outputPortCount = mDeviceInfo.getOutputPortCount();
+ StringBuilder builder = new StringBuilder("mInputPortOpen=[");
+ for (int i = 0; i < inputPortCount; i++) {
+ builder.append(mInputPortOpen[i]);
+ if (i < inputPortCount -1) {
+ builder.append(",");
+ }
+ }
+ builder.append("] mOutputPortOpenCount=[");
+ for (int i = 0; i < outputPortCount; i++) {
+ builder.append(mOutputPortOpenCount[i]);
+ if (i < outputPortCount -1) {
+ builder.append(",");
+ }
+ }
+ builder.append("]");
+ return builder.toString();
+ }
+
+ public static final Parcelable.Creator<MidiDeviceStatus> CREATOR =
+ new Parcelable.Creator<MidiDeviceStatus>() {
+ public MidiDeviceStatus createFromParcel(Parcel in) {
+ ClassLoader classLoader = MidiDeviceInfo.class.getClassLoader();
+ MidiDeviceInfo deviceInfo = in.readParcelable(classLoader);
+ boolean[] inputPortOpen = in.createBooleanArray();
+ int[] outputPortOpenCount = in.createIntArray();
+ return new MidiDeviceStatus(deviceInfo, inputPortOpen, outputPortOpenCount);
+ }
+
+ public MidiDeviceStatus[] newArray(int size) {
+ return new MidiDeviceStatus[size];
+ }
+ };
+
+ public int describeContents() {
+ return 0;
+ }
+
+ public void writeToParcel(Parcel parcel, int flags) {
+ parcel.writeParcelable(mDeviceInfo, flags);
+ parcel.writeBooleanArray(mInputPortOpen);
+ parcel.writeIntArray(mOutputPortOpenCount);
+ }
+}
diff --git a/media/java/android/media/midi/MidiInputPort.java b/media/java/android/media/midi/MidiInputPort.java
new file mode 100644
index 0000000..af5a86c
--- /dev/null
+++ b/media/java/android/media/midi/MidiInputPort.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.IBinder;
+import android.os.ParcelFileDescriptor;
+import android.os.RemoteException;
+import android.util.Log;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+/**
+ * This class is used for sending data to a port on a MIDI device
+ */
+public final class MidiInputPort extends MidiReceiver implements Closeable {
+ private static final String TAG = "MidiInputPort";
+
+ private IMidiDeviceServer mDeviceServer;
+ private final IBinder mToken;
+ private final int mPortNumber;
+ private ParcelFileDescriptor mParcelFileDescriptor;
+ private FileOutputStream mOutputStream;
+
+ private final CloseGuard mGuard = CloseGuard.get();
+ private boolean mIsClosed;
+
+ // buffer to use for sending data out our output stream
+ private final byte[] mBuffer = new byte[MidiPortImpl.MAX_PACKET_SIZE];
+
+ /* package */ MidiInputPort(IMidiDeviceServer server, IBinder token,
+ ParcelFileDescriptor pfd, int portNumber) {
+ super(MidiPortImpl.MAX_PACKET_DATA_SIZE);
+
+ mDeviceServer = server;
+ mToken = token;
+ mParcelFileDescriptor = pfd;
+ mPortNumber = portNumber;
+ mOutputStream = new FileOutputStream(pfd.getFileDescriptor());
+ mGuard.open("close");
+ }
+
+ /* package */ MidiInputPort(ParcelFileDescriptor pfd, int portNumber) {
+ this(null, null, pfd, portNumber);
+ }
+
+ /**
+ * Returns the port number of this port
+ *
+ * @return the port's port number
+ */
+ public final int getPortNumber() {
+ return mPortNumber;
+ }
+
+ @Override
+ public void onSend(byte[] msg, int offset, int count, long timestamp) throws IOException {
+ if (offset < 0 || count < 0 || offset + count > msg.length) {
+ throw new IllegalArgumentException("offset or count out of range");
+ }
+ if (count > MidiPortImpl.MAX_PACKET_DATA_SIZE) {
+ throw new IllegalArgumentException("count exceeds max message size");
+ }
+
+ synchronized (mBuffer) {
+ if (mOutputStream == null) {
+ throw new IOException("MidiInputPort is closed");
+ }
+ int length = MidiPortImpl.packData(msg, offset, count, timestamp, mBuffer);
+ mOutputStream.write(mBuffer, 0, length);
+ }
+ }
+
+ @Override
+ public void onFlush() throws IOException {
+ synchronized (mBuffer) {
+ if (mOutputStream == null) {
+ throw new IOException("MidiInputPort is closed");
+ }
+ int length = MidiPortImpl.packFlush(mBuffer);
+ mOutputStream.write(mBuffer, 0, length);
+ }
+ }
+
+ // used by MidiDevice.connectInputPort() to connect our socket directly to another device
+ /* package */ ParcelFileDescriptor claimFileDescriptor() {
+ synchronized (mBuffer) {
+ ParcelFileDescriptor pfd = mParcelFileDescriptor;
+ if (pfd != null) {
+ IoUtils.closeQuietly(mOutputStream);
+ mParcelFileDescriptor = null;
+ mOutputStream = null;
+ }
+ return pfd;
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ synchronized (mGuard) {
+ if (mIsClosed) return;
+ mGuard.close();
+ synchronized (mBuffer) {
+ if (mParcelFileDescriptor != null) {
+ mParcelFileDescriptor.close();
+ mParcelFileDescriptor = null;
+ }
+ if (mOutputStream != null) {
+ mOutputStream.close();
+ mOutputStream = null;
+ }
+ }
+ if (mDeviceServer != null) {
+ try {
+ mDeviceServer.closePort(mToken);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in MidiInputPort.close()");
+ }
+ }
+ mIsClosed = true;
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ mGuard.warnIfOpen();
+ // not safe to make binder calls from finalize()
+ mDeviceServer = null;
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+}
diff --git a/media/java/android/media/midi/MidiManager.java b/media/java/android/media/midi/MidiManager.java
new file mode 100644
index 0000000..d19cf36
--- /dev/null
+++ b/media/java/android/media/midi/MidiManager.java
@@ -0,0 +1,362 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.bluetooth.BluetoothDevice;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.content.pm.ServiceInfo;
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.RemoteException;
+import android.util.Log;
+
+import java.util.HashMap;
+
+/**
+ * This class is the public application interface to the MIDI service.
+ *
+ * <p>You can obtain an instance of this class by calling
+ * {@link android.content.Context#getSystemService(java.lang.String) Context.getSystemService()}.
+ *
+ * {@samplecode
+ * MidiManager manager = (MidiManager) getSystemService(Context.MIDI_SERVICE);}
+ */
+public final class MidiManager {
+ private static final String TAG = "MidiManager";
+
+ /**
+ * Intent for starting BluetoothMidiService
+ * @hide
+ */
+ public static final String BLUETOOTH_MIDI_SERVICE_INTENT =
+ "android.media.midi.BluetoothMidiService";
+
+ /**
+ * BluetoothMidiService package name
+ */
+ private static final String BLUETOOTH_MIDI_SERVICE_PACKAGE = "com.android.bluetoothmidiservice";
+
+ /**
+ * BluetoothMidiService class name
+ */
+ private static final String BLUETOOTH_MIDI_SERVICE_CLASS =
+ "com.android.bluetoothmidiservice.BluetoothMidiService";
+
+ private final Context mContext;
+ private final IMidiManager mService;
+ private final IBinder mToken = new Binder();
+
+ private HashMap<DeviceCallback,DeviceListener> mDeviceListeners =
+ new HashMap<DeviceCallback,DeviceListener>();
+
+ // Binder stub for receiving device notifications from MidiService
+ private class DeviceListener extends IMidiDeviceListener.Stub {
+ private final DeviceCallback mCallback;
+ private final Handler mHandler;
+
+ public DeviceListener(DeviceCallback callback, Handler handler) {
+ mCallback = callback;
+ mHandler = handler;
+ }
+
+ @Override
+ public void onDeviceAdded(MidiDeviceInfo device) {
+ if (mHandler != null) {
+ final MidiDeviceInfo deviceF = device;
+ mHandler.post(new Runnable() {
+ @Override public void run() {
+ mCallback.onDeviceAdded(deviceF);
+ }
+ });
+ } else {
+ mCallback.onDeviceAdded(device);
+ }
+ }
+
+ @Override
+ public void onDeviceRemoved(MidiDeviceInfo device) {
+ if (mHandler != null) {
+ final MidiDeviceInfo deviceF = device;
+ mHandler.post(new Runnable() {
+ @Override public void run() {
+ mCallback.onDeviceRemoved(deviceF);
+ }
+ });
+ } else {
+ mCallback.onDeviceRemoved(device);
+ }
+ }
+
+ @Override
+ public void onDeviceStatusChanged(MidiDeviceStatus status) {
+ if (mHandler != null) {
+ final MidiDeviceStatus statusF = status;
+ mHandler.post(new Runnable() {
+ @Override public void run() {
+ mCallback.onDeviceStatusChanged(statusF);
+ }
+ });
+ } else {
+ mCallback.onDeviceStatusChanged(status);
+ }
+ }
+ }
+
+ /**
+ * Callback class used for clients to receive MIDI device added and removed notifications
+ */
+ public static class DeviceCallback {
+ /**
+ * Called to notify when a new MIDI device has been added
+ *
+ * @param device a {@link MidiDeviceInfo} for the newly added device
+ */
+ public void onDeviceAdded(MidiDeviceInfo device) {
+ }
+
+ /**
+ * Called to notify when a MIDI device has been removed
+ *
+ * @param device a {@link MidiDeviceInfo} for the removed device
+ */
+ public void onDeviceRemoved(MidiDeviceInfo device) {
+ }
+
+ /**
+ * Called to notify when the status of a MIDI device has changed
+ *
+ * @param status a {@link MidiDeviceStatus} for the changed device
+ */
+ public void onDeviceStatusChanged(MidiDeviceStatus status) {
+ }
+ }
+
+ /**
+ * Listener class used for receiving the results of {@link #openDevice} and
+ * {@link #openBluetoothDevice}
+ */
+ public interface OnDeviceOpenedListener {
+ /**
+ * Called to respond to a {@link #openDevice} request
+ *
+ * @param device a {@link MidiDevice} for the opened device, or null if opening failed
+ */
+ abstract public void onDeviceOpened(MidiDevice device);
+ }
+
+ /**
+ * @hide
+ */
+ public MidiManager(Context context, IMidiManager service) {
+ mContext = context;
+ mService = service;
+ }
+
+ /**
+ * Registers a callback to receive notifications when MIDI devices are added and removed.
+ *
+ * @param callback a {@link DeviceCallback} for MIDI device notifications
+ * @param handler The {@link android.os.Handler Handler} that will be used for delivering the
+ * device notifications. If handler is null, then the thread used for the
+ * callback is unspecified.
+ */
+ public void registerDeviceCallback(DeviceCallback callback, Handler handler) {
+ DeviceListener deviceListener = new DeviceListener(callback, handler);
+ try {
+ mService.registerListener(mToken, deviceListener);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in registerDeviceListener");
+ return;
+ }
+ mDeviceListeners.put(callback, deviceListener);
+ }
+
+ /**
+ * Unregisters a {@link DeviceCallback}.
+ *
+ * @param callback a {@link DeviceCallback} to unregister
+ */
+ public void unregisterDeviceCallback(DeviceCallback callback) {
+ DeviceListener deviceListener = mDeviceListeners.remove(callback);
+ if (deviceListener != null) {
+ try {
+ mService.unregisterListener(mToken, deviceListener);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in unregisterDeviceListener");
+ }
+ }
+ }
+
+ /**
+ * Gets the list of all connected MIDI devices.
+ *
+ * @return an array of all MIDI devices
+ */
+ public MidiDeviceInfo[] getDevices() {
+ try {
+ return mService.getDevices();
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getDevices");
+ return new MidiDeviceInfo[0];
+ }
+ }
+
+ private void sendOpenDeviceResponse(final MidiDevice device,
+ final OnDeviceOpenedListener listener, Handler handler) {
+ if (handler != null) {
+ handler.post(new Runnable() {
+ @Override public void run() {
+ listener.onDeviceOpened(device);
+ }
+ });
+ } else {
+ listener.onDeviceOpened(device);
+ }
+ }
+
+ /**
+ * Opens a MIDI device for reading and writing.
+ *
+ * @param deviceInfo a {@link android.media.midi.MidiDeviceInfo} to open
+ * @param listener a {@link MidiManager.OnDeviceOpenedListener} to be called
+ * to receive the result
+ * @param handler the {@link android.os.Handler Handler} that will be used for delivering
+ * the result. If handler is null, then the thread used for the
+ * listener is unspecified.
+ */
+ public void openDevice(MidiDeviceInfo deviceInfo, OnDeviceOpenedListener listener,
+ Handler handler) {
+ MidiDevice device = null;
+ try {
+ IMidiDeviceServer server = mService.openDevice(mToken, deviceInfo);
+ if (server == null) {
+ ServiceInfo serviceInfo = (ServiceInfo)deviceInfo.getProperties().getParcelable(
+ MidiDeviceInfo.PROPERTY_SERVICE_INFO);
+ if (serviceInfo == null) {
+ Log.e(TAG, "no ServiceInfo for " + deviceInfo);
+ } else {
+ Intent intent = new Intent(MidiDeviceService.SERVICE_INTERFACE);
+ intent.setComponent(new ComponentName(serviceInfo.packageName,
+ serviceInfo.name));
+ final MidiDeviceInfo deviceInfoF = deviceInfo;
+ final OnDeviceOpenedListener listenerF = listener;
+ final Handler handlerF = handler;
+ if (mContext.bindService(intent,
+ new ServiceConnection() {
+ @Override
+ public void onServiceConnected(ComponentName name, IBinder binder) {
+ IMidiDeviceServer server =
+ IMidiDeviceServer.Stub.asInterface(binder);
+ MidiDevice device = new MidiDevice(deviceInfoF, server, mContext,
+ this);
+ sendOpenDeviceResponse(device, listenerF, handlerF);
+ }
+
+ @Override
+ public void onServiceDisconnected(ComponentName name) {
+ // FIXME - anything to do here?
+ }
+ },
+ Context.BIND_AUTO_CREATE))
+ {
+ // return immediately to avoid calling sendOpenDeviceResponse below
+ return;
+ } else {
+ Log.e(TAG, "Unable to bind service: " + intent);
+ }
+ }
+ } else {
+ device = new MidiDevice(deviceInfo, server);
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in openDevice");
+ }
+ sendOpenDeviceResponse(device, listener, handler);
+ }
+
+ /**
+ * Opens a Bluetooth MIDI device for reading and writing.
+ *
+ * @param bluetoothDevice a {@link android.bluetooth.BluetoothDevice} to open as a MIDI device
+ * @param listener a {@link MidiManager.OnDeviceOpenedListener} to be called to receive the
+ * result
+ * @param handler the {@link android.os.Handler Handler} that will be used for delivering
+ * the result. If handler is null, then the thread used for the
+ * listener is unspecified.
+ */
+ public void openBluetoothDevice(final BluetoothDevice bluetoothDevice,
+ final OnDeviceOpenedListener listener, final Handler handler) {
+ Intent intent = new Intent(BLUETOOTH_MIDI_SERVICE_INTENT);
+ intent.setComponent(new ComponentName(BLUETOOTH_MIDI_SERVICE_PACKAGE,
+ BLUETOOTH_MIDI_SERVICE_CLASS));
+ intent.putExtra("device", bluetoothDevice);
+ if (!mContext.bindService(intent,
+ new ServiceConnection() {
+ @Override
+ public void onServiceConnected(ComponentName name, IBinder binder) {
+ IMidiDeviceServer server =
+ IMidiDeviceServer.Stub.asInterface(binder);
+ try {
+ // fetch MidiDeviceInfo from the server
+ MidiDeviceInfo deviceInfo = server.getDeviceInfo();
+ MidiDevice device = new MidiDevice(deviceInfo, server, mContext, this);
+ sendOpenDeviceResponse(device, listener, handler);
+ } catch (RemoteException e) {
+ Log.e(TAG, "remote exception in onServiceConnected");
+ sendOpenDeviceResponse(null, listener, handler);
+ }
+ }
+
+ @Override
+ public void onServiceDisconnected(ComponentName name) {
+ // FIXME - anything to do here?
+ }
+ },
+ Context.BIND_AUTO_CREATE))
+ {
+ Log.e(TAG, "Unable to bind service: " + intent);
+ sendOpenDeviceResponse(null, listener, handler);
+ }
+ }
+
+ /** @hide */
+ public MidiDeviceServer createDeviceServer(MidiReceiver[] inputPortReceivers,
+ int numOutputPorts, String[] inputPortNames, String[] outputPortNames,
+ Bundle properties, int type, MidiDeviceServer.Callback callback) {
+ try {
+ MidiDeviceServer server = new MidiDeviceServer(mService, inputPortReceivers,
+ numOutputPorts, callback);
+ MidiDeviceInfo deviceInfo = mService.registerDeviceServer(server.getBinderInterface(),
+ inputPortReceivers.length, numOutputPorts, inputPortNames, outputPortNames,
+ properties, type);
+ if (deviceInfo == null) {
+ Log.e(TAG, "registerVirtualDevice failed");
+ return null;
+ }
+ server.setDeviceInfo(deviceInfo);
+ return server;
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in createVirtualDevice");
+ return null;
+ }
+ }
+}
diff --git a/media/java/android/media/midi/MidiOutputPort.java b/media/java/android/media/midi/MidiOutputPort.java
new file mode 100644
index 0000000..0096995
--- /dev/null
+++ b/media/java/android/media/midi/MidiOutputPort.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.IBinder;
+import android.os.ParcelFileDescriptor;
+import android.os.RemoteException;
+import android.util.Log;
+
+import com.android.internal.midi.MidiDispatcher;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+/**
+ * This class is used for receiving data from a port on a MIDI device
+ */
+public final class MidiOutputPort extends MidiSender implements Closeable {
+ private static final String TAG = "MidiOutputPort";
+
+ private IMidiDeviceServer mDeviceServer;
+ private final IBinder mToken;
+ private final int mPortNumber;
+ private final FileInputStream mInputStream;
+ private final MidiDispatcher mDispatcher = new MidiDispatcher();
+
+ private final CloseGuard mGuard = CloseGuard.get();
+ private boolean mIsClosed;
+
+ // This thread reads MIDI events from a socket and distributes them to the list of
+ // MidiReceivers attached to this device.
+ private final Thread mThread = new Thread() {
+ @Override
+ public void run() {
+ byte[] buffer = new byte[MidiPortImpl.MAX_PACKET_SIZE];
+
+ try {
+ while (true) {
+ // read next event
+ int count = mInputStream.read(buffer);
+ if (count < 0) {
+ break;
+ // FIXME - inform receivers here?
+ }
+
+ int packetType = MidiPortImpl.getPacketType(buffer, count);
+ switch (packetType) {
+ case MidiPortImpl.PACKET_TYPE_DATA: {
+ int offset = MidiPortImpl.getDataOffset(buffer, count);
+ int size = MidiPortImpl.getDataSize(buffer, count);
+ long timestamp = MidiPortImpl.getPacketTimestamp(buffer, count);
+
+ // dispatch to all our receivers
+ mDispatcher.send(buffer, offset, size, timestamp);
+ break;
+ }
+ case MidiPortImpl.PACKET_TYPE_FLUSH:
+ mDispatcher.flush();
+ break;
+ default:
+ Log.e(TAG, "Unknown packet type " + packetType);
+ break;
+ }
+ }
+ } catch (IOException e) {
+ // FIXME report I/O failure?
+ Log.e(TAG, "read failed");
+ } finally {
+ IoUtils.closeQuietly(mInputStream);
+ }
+ }
+ };
+
+ /* package */ MidiOutputPort(IMidiDeviceServer server, IBinder token,
+ ParcelFileDescriptor pfd, int portNumber) {
+ mDeviceServer = server;
+ mToken = token;
+ mPortNumber = portNumber;
+ mInputStream = new ParcelFileDescriptor.AutoCloseInputStream(pfd);
+ mThread.start();
+ mGuard.open("close");
+ }
+
+ /* package */ MidiOutputPort(ParcelFileDescriptor pfd, int portNumber) {
+ this(null, null, pfd, portNumber);
+ }
+
+ /**
+ * Returns the port number of this port
+ *
+ * @return the port's port number
+ */
+ public final int getPortNumber() {
+ return mPortNumber;
+ }
+
+ @Override
+ public void onConnect(MidiReceiver receiver) {
+ mDispatcher.getSender().connect(receiver);
+ }
+
+ @Override
+ public void onDisconnect(MidiReceiver receiver) {
+ mDispatcher.getSender().disconnect(receiver);
+ }
+
+ @Override
+ public void close() throws IOException {
+ synchronized (mGuard) {
+ if (mIsClosed) return;
+
+ mGuard.close();
+ mInputStream.close();
+ if (mDeviceServer != null) {
+ try {
+ mDeviceServer.closePort(mToken);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in MidiOutputPort.close()");
+ }
+ }
+ mIsClosed = true;
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ mGuard.warnIfOpen();
+ // not safe to make binder calls from finalize()
+ mDeviceServer = null;
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+}
diff --git a/media/java/android/media/midi/MidiPortImpl.java b/media/java/android/media/midi/MidiPortImpl.java
new file mode 100644
index 0000000..16fc214
--- /dev/null
+++ b/media/java/android/media/midi/MidiPortImpl.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+/**
+ * This class contains utilities for socket communication between a
+ * MidiInputPort and MidiOutputPort
+ */
+/* package */ class MidiPortImpl {
+ private static final String TAG = "MidiPort";
+
+ /**
+ * Packet type for data packet
+ */
+ public static final int PACKET_TYPE_DATA = 1;
+
+ /**
+ * Packet type for flush packet
+ */
+ public static final int PACKET_TYPE_FLUSH = 2;
+
+ /**
+ * Maximum size of a packet that can pass through our ParcelFileDescriptor.
+ */
+ public static final int MAX_PACKET_SIZE = 1024;
+
+ /**
+ * size of message timestamp in bytes
+ */
+ private static final int TIMESTAMP_SIZE = 8;
+
+ /**
+ * Data packet overhead is timestamp size plus packet type byte
+ */
+ private static final int DATA_PACKET_OVERHEAD = TIMESTAMP_SIZE + 1;
+
+ /**
+ * Maximum amount of MIDI data that can be included in a packet
+ */
+ public static final int MAX_PACKET_DATA_SIZE = MAX_PACKET_SIZE - DATA_PACKET_OVERHEAD;
+
+ /**
+ * Utility function for packing MIDI data to be sent through our ParcelFileDescriptor
+ *
+ * message byte array contains the variable length MIDI message
+ * offset and size give the location and length of the message within the array
+ * timestamp is the message timestamp to pack
+ * dest is the buffer to pack into
+ * returns the size of the packed message
+ */
+ public static int packData(byte[] message, int offset, int size, long timestamp,
+ byte[] dest) {
+ if (size > MAX_PACKET_DATA_SIZE) {
+ size = MAX_PACKET_DATA_SIZE;
+ }
+ int length = 0;
+ // packet type goes first
+ dest[length++] = PACKET_TYPE_DATA;
+ // data goes next
+ System.arraycopy(message, offset, dest, length, size);
+ length += size;
+
+ // followed by timestamp
+ for (int i = 0; i < TIMESTAMP_SIZE; i++) {
+ dest[length++] = (byte)timestamp;
+ timestamp >>= 8;
+ }
+
+ return length;
+ }
+
+ /**
+ * Utility function for packing a flush command to be sent through our ParcelFileDescriptor
+ */
+ public static int packFlush(byte[] dest) {
+ dest[0] = PACKET_TYPE_FLUSH;
+ return 1;
+ }
+
+ /**
+ * Returns the packet type (PACKET_TYPE_DATA or PACKET_TYPE_FLUSH)
+ */
+ public static int getPacketType(byte[] buffer, int bufferLength) {
+ return buffer[0];
+ }
+
+ /**
+ * Utility function for unpacking MIDI data received from our ParcelFileDescriptor
+ * returns the offset of the MIDI message in packed buffer
+ */
+ public static int getDataOffset(byte[] buffer, int bufferLength) {
+ // data follows packet type byte
+ return 1;
+ }
+
+ /**
+ * Utility function for unpacking MIDI data received from our ParcelFileDescriptor
+ * returns size of MIDI data in packed buffer
+ */
+ public static int getDataSize(byte[] buffer, int bufferLength) {
+ // data size is the total buffer length minus the packet type byte and the timestamp
+ return bufferLength - DATA_PACKET_OVERHEAD;
+ }
+
+ /**
+ * Utility function for unpacking MIDI data received from our ParcelFileDescriptor
+ * unpacks timestamp from packed buffer
+ */
+ public static long getPacketTimestamp(byte[] buffer, int bufferLength) {
+ // timestamp is at end of the packet
+ int offset = bufferLength;
+ long timestamp = 0;
+
+ for (int i = 0; i < TIMESTAMP_SIZE; i++) {
+ int b = (int)buffer[--offset] & 0xFF;
+ timestamp = (timestamp << 8) | b;
+ }
+ return timestamp;
+ }
+}
diff --git a/media/java/android/media/midi/MidiReceiver.java b/media/java/android/media/midi/MidiReceiver.java
new file mode 100644
index 0000000..12a5f04
--- /dev/null
+++ b/media/java/android/media/midi/MidiReceiver.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import java.io.IOException;
+
+/**
+ * Interface for sending and receiving data to and from a MIDI device.
+ */
+abstract public class MidiReceiver {
+
+ private final int mMaxMessageSize;
+
+ /**
+ * Default MidiReceiver constructor. Maximum message size is set to
+ * {@link java.lang.Integer#MAX_VALUE}
+ */
+ public MidiReceiver() {
+ mMaxMessageSize = Integer.MAX_VALUE;
+ }
+
+ /**
+ * MidiReceiver constructor.
+ * @param maxMessageSize the maximum size of a message this receiver can receive
+ */
+ public MidiReceiver(int maxMessageSize) {
+ mMaxMessageSize = maxMessageSize;
+ }
+
+ /**
+ * Called whenever the receiver is passed new MIDI data.
+ * Subclasses override this method to receive MIDI data.
+ * May fail if count exceeds {@link #getMaxMessageSize}.
+ *
+ * NOTE: the msg array parameter is only valid within the context of this call.
+ * The msg bytes should be copied by the receiver rather than retaining a reference
+ * to this parameter.
+ * Also, modifying the contents of the msg array parameter may result in other receivers
+ * in the same application receiving incorrect values in their {@link #onSend} method.
+ *
+ * @param msg a byte array containing the MIDI data
+ * @param offset the offset of the first byte of the data in the array to be processed
+ * @param count the number of bytes of MIDI data in the array to be processed
+ * @param timestamp the timestamp of the message (based on {@link java.lang.System#nanoTime})
+ * @throws IOException
+ */
+ abstract public void onSend(byte[] msg, int offset, int count, long timestamp)
+ throws IOException;
+
+ /**
+ * Instructs the receiver to discard all pending MIDI data.
+ * @throws IOException
+ */
+ public void flush() throws IOException {
+ onFlush();
+ }
+
+ /**
+ * Called when the receiver is instructed to discard all pending MIDI data.
+ * Subclasses should override this method if they maintain a list or queue of MIDI data
+ * to be processed in the future.
+ * @throws IOException
+ */
+ public void onFlush() throws IOException {
+ }
+
+ /**
+ * Returns the maximum size of a message this receiver can receive.
+ * @return maximum message size
+ */
+ public final int getMaxMessageSize() {
+ return mMaxMessageSize;
+ }
+
+ /**
+ * Called to send MIDI data to the receiver without a timestamp.
+ * Data will be processed by receiver in the order sent.
+ * Data will get split into multiple calls to {@link #onSend} if count exceeds
+ * {@link #getMaxMessageSize}. Blocks until all the data is sent or an exception occurs.
+ * In the latter case, the amount of data sent prior to the exception is not provided to caller.
+ * The communication should be considered corrupt. The sender should reestablish
+ * communication, reset all controllers and send all notes off.
+ *
+ * @param msg a byte array containing the MIDI data
+ * @param offset the offset of the first byte of the data in the array to be sent
+ * @param count the number of bytes of MIDI data in the array to be sent
+ * @throws IOException if the data could not be sent in entirety
+ */
+ public void send(byte[] msg, int offset, int count) throws IOException {
+ // TODO add public static final TIMESTAMP_NONE = 0L
+ send(msg, offset, count, 0L);
+ }
+
+ /**
+ * Called to send MIDI data to the receiver with a specified timestamp.
+ * Data will be processed by receiver in order first by timestamp, then in the order sent.
+ * Data will get split into multiple calls to {@link #onSend} if count exceeds
+ * {@link #getMaxMessageSize}. Blocks until all the data is sent or an exception occurs.
+ * In the latter case, the amount of data sent prior to the exception is not provided to caller.
+ * The communication should be considered corrupt. The sender should reestablish
+ * communication, reset all controllers and send all notes off.
+ *
+ * @param msg a byte array containing the MIDI data
+ * @param offset the offset of the first byte of the data in the array to be sent
+ * @param count the number of bytes of MIDI data in the array to be sent
+ * @param timestamp the timestamp of the message, based on {@link java.lang.System#nanoTime}
+ * @throws IOException if the data could not be sent in entirety
+ */
+ public void send(byte[] msg, int offset, int count, long timestamp)
+ throws IOException {
+ int messageSize = getMaxMessageSize();
+ while (count > 0) {
+ int length = (count > messageSize ? messageSize : count);
+ onSend(msg, offset, length, timestamp);
+ offset += length;
+ count -= length;
+ }
+ }
+}
diff --git a/media/java/android/media/midi/MidiSender.java b/media/java/android/media/midi/MidiSender.java
new file mode 100644
index 0000000..c5f1edc
--- /dev/null
+++ b/media/java/android/media/midi/MidiSender.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+/**
+ * Interface provided by a device to allow attaching
+ * MidiReceivers to a MIDI device.
+ */
+abstract public class MidiSender {
+
+ /**
+ * Connects a {@link MidiReceiver} to the sender
+ *
+ * @param receiver the receiver to connect
+ */
+ public void connect(MidiReceiver receiver) {
+ if (receiver == null) {
+ throw new NullPointerException("receiver null in MidiSender.connect");
+ }
+ onConnect(receiver);
+ }
+
+ /**
+ * Disconnects a {@link MidiReceiver} from the sender
+ *
+ * @param receiver the receiver to disconnect
+ */
+ public void disconnect(MidiReceiver receiver) {
+ if (receiver == null) {
+ throw new NullPointerException("receiver null in MidiSender.disconnect");
+ }
+ onDisconnect(receiver);
+ }
+
+ /**
+ * Called to connect a {@link MidiReceiver} to the sender
+ *
+ * @param receiver the receiver to connect
+ */
+ abstract public void onConnect(MidiReceiver receiver);
+
+ /**
+ * Called to disconnect a {@link MidiReceiver} from the sender
+ *
+ * @param receiver the receiver to disconnect
+ */
+ abstract public void onDisconnect(MidiReceiver receiver);
+}
diff --git a/media/java/android/media/midi/package.html b/media/java/android/media/midi/package.html
new file mode 100644
index 0000000..bd589a9
--- /dev/null
+++ b/media/java/android/media/midi/package.html
@@ -0,0 +1,321 @@
+<html>
+<body>
+<p>Android MIDI User Guide</p>
+
+<h1 id=overview>Overview</h1>
+
+
+<p>This document describes how to use the Android MIDI API in Java.</p>
+
+<p>The Android MIDI package allows users to:</p>
+
+<ul>
+ <li> Connect a MIDI keyboard to Android to play a synthesizer or drive music apps.
+ <li> Connect alternative MIDI controllers to Android.
+ <li> Drive external MIDI synths from Android.
+ <li> Drive external peripherals, lights, show control, etc from Android.
+ <li> Generate music dynamically from games or music creation apps.
+ <li> Generate MIDI messages in one app and send them to a second app.
+ <li> Use an Android device running in <em>peripheral mode</em> as a multitouch controller connected to a laptop.
+</ul>
+
+<h2 id=the_api_features_include>The API features include:</h2>
+
+
+<ul>
+ <li> Enumeration of currently available devices. Information includes name, vendor,
+capabilities, etc.
+ <li> Provide notification when MIDI devices are plugged in or unplugged.
+ <li> Support efficient transmission of single or multiple short 1-3 byte MIDI
+messages.
+ <li> Support transmission of arbitrary length data for SysEx, etc.
+ <li> Timestamps to avoid jitter.
+ <li> Support direct connection or &ldquo;patching&rdquo; of devices for lower latency.
+</ul>
+
+<h2 id=transports_supported>Transports Supported</h2>
+
+
+<p>The API is &ldquo;transport agnostic&rdquo;. But there are several transports currently
+supported:</p>
+
+<ul>
+ <li> USB
+ <li> software routing
+ <li> BTLE
+</ul>
+
+<h1 id=android_midi_terminology>Android MIDI Terminology</h1>
+
+
+<h2 id=terminology>Terminology</h2>
+
+
+<p>A <strong>Device</strong> is a MIDI capable object that has zero or more InputPorts and OutputPorts.</p>
+
+<p>An <strong>InputPort</strong> has 16 channels and can <strong>receive</strong> MIDI messages from an OutputPort or an app.</p>
+
+<p>An <strong>OutputPort</strong> has 16 channels and can <strong>send</strong> MIDI messages to an InputPort or an app.</p>
+
+<p><strong>MidiService</strong> is a centralized process that keeps track of all devices and brokers
+communication between them.</p>
+
+<p><strong>MidiManager</strong> is a class that the application or a device manager calls to communicate with
+the MidiService.</p>
+
+<h1 id=writing_a_midi_application>Writing a MIDI Application</h1>
+
+
+<h2 id=the_midimanager>The MidiManager</h2>
+
+
+<p>The primary class for accessing the MIDI package is through the MidiManager.</p>
+
+<pre class=prettyprint>
+MidiManager m = (MidiManager)context.getSystemService(Context.MIDI_SERVICE);
+</pre>
+
+
+<h2 id=get_list_of_already_plugged_in_entities>Get List of Already Plugged In Entities</h2>
+
+
+<p>When an app starts, it can get a list of all the available MIDI devices. This
+information can be presented to a user, allowing them to choose a device.</p>
+
+<pre class=prettyprint>
+MidiDeviceInfo[] infos = m.getDevices();
+</pre>
+
+
+<h2 id=notification_of_midi_devices_hotplug_events>Notification of MIDI Devices HotPlug Events</h2>
+
+
+<p>The application can request notification when, for example, keyboards are
+plugged in or unplugged.</p>
+
+<pre class=prettyprint>
+m.registerDeviceCallback(new MidiManager.DeviceCallback() {
+ public void onDeviceAdded( MidiDeviceInfo info ) {
+ ...
+ }
+ public void onDeviceRemoved( MidiDeviceInfo info ) {
+ ...
+ }
+ });
+</pre>
+
+
+<h2 id=device_and_port_information>Device and Port Information</h2>
+
+
+<p>You can query the number of input and output ports.</p>
+
+<pre class=prettyprint>
+int numInputs = info.getInputPortCount();
+int numOutputs = info.getOutputPortCount();
+</pre>
+
+
+<p>Note that &ldquo;input&rdquo; and &ldquo;output&rdquo; are from the standpoint of the device. So a
+synthesizer will have an &ldquo;input&rdquo; port that receives messages. A keyboard will
+have an &ldquo;output&rdquo; port that sends messages.</p>
+
+<p>The MidiDeviceInfo has a bundle of properties.</p>
+
+<pre class=prettyprint>
+Bundle properties = info.getProperties();
+String manufacturer = properties
+ .getString(MidiDeviceInfo.PROPERTY_MANUFACTURER);
+</pre>
+
+
+<p>Other properties include PROPERTY_PRODUCT, PROPERTY_NAME,
+PROPERTY_SERIAL_NUMBER</p>
+
+<p>You can get the names of the ports from a PortInfo object.</p>
+
+<pre class=prettyprint>
+PortInfo portInfo = info.getInputPortInfo(i);
+String portName = portInfo.getName();
+</pre>
+
+
+<h2 id=open_a_midi_device>Open a MIDI Device</h2>
+
+
+<p>To access a MIDI device you need to open it first. The open is asynchronous so
+you need to provide a callback for completion. You can specify an optional
+Handler if you want the callback to occur on a specific Thread.</p>
+
+<pre class=prettyprint>
+m.openDevice(info, new MidiManager.OnDeviceOpenedListener() {
+ &#64;Override
+ public void onDeviceOpened(MidiDevice device) {
+ if (device == null) {
+ Log.e(TAG, "could not open device " + info);
+ } else {
+            ...
+        }
+    }}, new Handler(Looper.getMainLooper()));
+</pre>
+
+
+<h2 id=open_a_midi_input_port>Open a MIDI Input Port</h2>
+
+
+<p>If you want to send a message to a MIDI Device then you need to open an &ldquo;input&rdquo;
+port with exclusive access.</p>
+
+<pre class=prettyprint>
+MidiInputPort inputPort = device.openInputPort(index);
+</pre>
+
+
+<h2 id=send_a_noteon>Send a NoteOn</h2>
+
+
+<p>MIDI messages are sent as byte arrays. Here we encode a NoteOn message.</p>
+
+<pre class=prettyprint>
+byte[] buffer = new byte[64];
+int numBytes = 0;
+buffer[numBytes++] = 0x90 + channel; // note on
+buffer[numBytes++] = pitch;
+buffer[numBytes++] = velocity;
+int offset = 0;
+// post is non-blocking
+inputPort.send(buffer, offset, numBytes);
+</pre>
+
+
+<p>Sometimes it is convenient to send MIDI messages with a timestamp. By
+scheduling events in the future we can mask scheduling jitter. Android MIDI
+timestamps are based on the monotonic nanosecond system timer. This is
+consistent with the other audio and input timers.</p>
+
+<p>Here we send a message with a timestamp 2 seconds in the future.</p>
+
+<pre class=prettyprint>
+long now = System.nanoTime();
+long future = now + (2 * 1000000000);
+inputPort.send(buffer, offset, numBytes, future);
+</pre>
+
+
+<p>If you want to cancel events that you have scheduled in the future then call
+flush().</p>
+
+<pre class=prettyprint>
+inputPort.flush(); // discard events
+</pre>
+
+
+<p>If there were any MIDI NoteOff messages left in the buffer then they will be
+discarded and you may get stuck notes. So we recommend sending &ldquo;all notes off&rdquo;
+after doing a flush.</p>
+
+<h2 id=receive_a_note>Receive a Note</h2>
+
+
+<p>To receive MIDI data from a device you need to extend MidiReceiver. Then
+connect your receiver to an output port of the device.</p>
+
+<pre class=prettyprint>
+class MyReceiver extends MidiReceiver {
+ public void onSend(byte[] data, int offset,
+ int count, long timestamp) throws IOException {
+ // parse MIDI or whatever
+ }
+}
+MidiOutputPort outputPort = device.openOutputPort(index);
+outputPort.connect(new MyReceiver());
+</pre>
+
+
+<p>The data that arrives is not validated or aligned in any particular way. It is
+raw MIDI data and can contain multiple messages or partial messages. It might
+contain System Real-Time messages, which can be interleaved inside other
+messages.</p>
+
+<h1 id=creating_a_midi_virtual_device_service>Creating a MIDI Virtual Device Service</h1>
+
+
+<p>An app can provide a MIDI Service that can be used by other apps. For example,
+an app can provide a custom synthesizer that other apps can send messages to. </p>
+
+<h2 id=manifest_files>Manifest Files</h2>
+
+
+<p>An app declares that it will function as a MIDI server in the
+AndroidManifest.xml file.</p>
+
+<pre class=prettyprint>
+&lt;service android:name="<strong>MySynthDeviceService</strong>">
+ &lt;intent-filter>
+ &lt;action android:name="android.media.midi.MidiDeviceService" />
+ &lt;/intent-filter>
+ &lt;meta-data android:name="android.media.midi.MidiDeviceService"
+ android:resource="@xml/<strong>synth_device_info</strong>" />
+&lt;/service>
+</pre>
+
+
+<p>The details of the resource in this example is stored in
+&ldquo;res/xml/synth_device_info.xml&rdquo;.</p>
+
+<pre class=prettyprint>
+&lt;devices>
+ &lt;device manufacturer="MyCompany" product="MidiSynthBasic">
+ &lt;input-port name="input" />
+ &lt;/device>
+&lt;/devices>
+</pre>
+
+
+<h2 id=extend_midideviceservice>Extend MidiDeviceService</h2>
+
+
+<p>You then define your server by extending android.media.midi.MidiDeviceService.
+Let&lsquo;s assume you have a MySynthEngine class that extends MidiReceiver.</p>
+
+<pre class=prettyprint>
+import android.media.midi.MidiDeviceService;
+import android.media.midi.MidiDeviceStatus;
+import android.media.midi.MidiReceiver;
+
+public class MidiSynthDeviceService extends MidiDeviceService {
+ private static final String TAG = "MidiSynthDeviceService";
+ private MySynthEngine mSynthEngine = new MySynthEngine();
+ &#64;Override
+ public void onCreate() {
+ super.onCreate();
+ }
+
+ &#64;Override
+ public void onDestroy() {
+ mSynthEngine.stop();
+ super.onDestroy();
+ }
+
+ &#64;Override
+ // Declare the receivers associated with your input ports.
+ public MidiReceiver[] onGetInputPortReceivers() {
+ return new MidiReceiver[] { mSynthEngine };
+ }
+
+ /**
+ * This will get called when clients connect or disconnect.
+ * You can use it to turn on your synth only when needed.
+ */
+ &#64;Override
+ public void onDeviceStatusChanged(MidiDeviceStatus status) {
+ if (status.isInputPortOpen(0)) {
+ mSynthEngine.start();
+ } else {
+ mSynthEngine.stop();
+ }
+ }
+}
+</pre>
+</body>
+</html>
diff --git a/media/java/android/media/projection/MediaProjection.java b/media/java/android/media/projection/MediaProjection.java
index a6bde1d..e757f09 100644
--- a/media/java/android/media/projection/MediaProjection.java
+++ b/media/java/android/media/projection/MediaProjection.java
@@ -25,7 +25,6 @@ import android.media.AudioRecord;
import android.media.projection.IMediaProjection;
import android.media.projection.IMediaProjectionCallback;
import android.os.Handler;
-import android.os.Looper;
import android.os.RemoteException;
import android.util.ArrayMap;
import android.util.Log;
diff --git a/media/java/android/media/projection/MediaProjectionManager.java b/media/java/android/media/projection/MediaProjectionManager.java
index a1cfc35..f4a548b 100644
--- a/media/java/android/media/projection/MediaProjectionManager.java
+++ b/media/java/android/media/projection/MediaProjectionManager.java
@@ -22,7 +22,6 @@ import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.media.projection.IMediaProjection;
-import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
diff --git a/media/java/android/media/session/ISessionCallback.aidl b/media/java/android/media/session/ISessionCallback.aidl
index 49087b0..adb6b06 100644
--- a/media/java/android/media/session/ISessionCallback.aidl
+++ b/media/java/android/media/session/ISessionCallback.aidl
@@ -15,8 +15,8 @@
package android.media.session;
-import android.media.Rating;
import android.content.Intent;
+import android.media.Rating;
import android.net.Uri;
import android.os.Bundle;
import android.os.ResultReceiver;
@@ -30,8 +30,9 @@ oneway interface ISessionCallback {
// These callbacks are for the TransportPerformer
void onPlay();
- void onPlayFromMediaId(String uri, in Bundle extras);
+ void onPlayFromMediaId(String mediaId, in Bundle extras);
void onPlayFromSearch(String query, in Bundle extras);
+ void onPlayFromUri(in Uri uri, in Bundle extras);
void onSkipToTrack(long id);
void onPause();
void onStop();
diff --git a/media/java/android/media/session/ISessionController.aidl b/media/java/android/media/session/ISessionController.aidl
index d684688..285e5f7 100644
--- a/media/java/android/media/session/ISessionController.aidl
+++ b/media/java/android/media/session/ISessionController.aidl
@@ -21,9 +21,9 @@ import android.content.pm.ParceledListSlice;
import android.media.MediaMetadata;
import android.media.Rating;
import android.media.session.ISessionControllerCallback;
+import android.media.session.MediaSession;
import android.media.session.ParcelableVolumeInfo;
import android.media.session.PlaybackState;
-import android.media.session.MediaSession;
import android.net.Uri;
import android.os.Bundle;
import android.os.ResultReceiver;
@@ -51,8 +51,9 @@ interface ISessionController {
// These commands are for the TransportControls
void play();
- void playFromMediaId(String uri, in Bundle extras);
+ void playFromMediaId(String mediaId, in Bundle extras);
void playFromSearch(String string, in Bundle extras);
+ void playFromUri(in Uri uri, in Bundle extras);
void skipToQueueItem(long id);
void pause();
void stop();
diff --git a/media/java/android/media/session/MediaController.java b/media/java/android/media/session/MediaController.java
index dd6bd20..2acee04 100644
--- a/media/java/android/media/session/MediaController.java
+++ b/media/java/android/media/session/MediaController.java
@@ -504,8 +504,8 @@ public final class MediaController {
}
/**
- * Callback for receiving updates on from the session. A Callback can be
- * registered using {@link #registerCallback}
+ * Callback for receiving updates from the session. A Callback can be
+ * registered using {@link #registerCallback}.
*/
public static abstract class Callback {
/**
@@ -603,9 +603,9 @@ public final class MediaController {
}
/**
- * Request that the player start playback for a specific {@link Uri}.
+ * Request that the player start playback for a specific media id.
*
- * @param mediaId The uri of the requested media.
+ * @param mediaId The id of the requested media.
* @param extras Optional extras that can include extra information about the media item
* to be played.
*/
@@ -644,6 +644,25 @@ public final class MediaController {
}
/**
+ * Request that the player start playback for a specific {@link Uri}.
+ *
+ * @param uri The URI of the requested media.
+ * @param extras Optional extras that can include extra information about the media item
+ * to be played.
+ */
+ public void playFromUri(Uri uri, Bundle extras) {
+ if (uri == null || Uri.EMPTY.equals(uri)) {
+ throw new IllegalArgumentException(
+ "You must specify a non-empty Uri for playFromUri.");
+ }
+ try {
+ mSessionBinder.playFromUri(uri, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling play(" + uri + ").", e);
+ }
+ }
+
+ /**
* Play an item with a specific id in the play queue. If you specify an
* id that is not in the play queue, the behavior is undefined.
*/
diff --git a/media/java/android/media/session/MediaSession.java b/media/java/android/media/session/MediaSession.java
index df4bc78..e1e9b79 100644
--- a/media/java/android/media/session/MediaSession.java
+++ b/media/java/android/media/session/MediaSession.java
@@ -524,6 +524,10 @@ public final class MediaSession {
postToCallback(CallbackMessageHandler.MSG_PLAY_SEARCH, query, extras);
}
+ private void dispatchPlayFromUri(Uri uri, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PLAY_URI, uri, extras);
+ }
+
private void dispatchSkipToItem(long id) {
postToCallback(CallbackMessageHandler.MSG_SKIP_TO_ITEM, id);
}
@@ -816,6 +820,12 @@ public final class MediaSession {
}
/**
+ * Override to handle requests to play a specific media item represented by a URI.
+ */
+ public void onPlayFromUri(Uri uri, Bundle extras) {
+ }
+
+ /**
* Override to handle requests to play an item with a given id from the
* play queue.
*/
@@ -944,6 +954,14 @@ public final class MediaSession {
}
@Override
+ public void onPlayFromUri(Uri uri, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPlayFromUri(uri, extras);
+ }
+ }
+
+ @Override
public void onSkipToTrack(long id) {
MediaSession session = mMediaSession.get();
if (session != null) {
@@ -1154,6 +1172,7 @@ public final class MediaSession {
private static final int MSG_COMMAND = 15;
private static final int MSG_ADJUST_VOLUME = 16;
private static final int MSG_SET_VOLUME = 17;
+ private static final int MSG_PLAY_URI = 18;
private MediaSession.Callback mCallback;
@@ -1193,6 +1212,9 @@ public final class MediaSession {
case MSG_PLAY_SEARCH:
mCallback.onPlayFromSearch((String) msg.obj, msg.getData());
break;
+ case MSG_PLAY_URI:
+ mCallback.onPlayFromUri((Uri) msg.obj, msg.getData());
+ break;
case MSG_SKIP_TO_ITEM:
mCallback.onSkipToQueueItem((Long) msg.obj);
break;
diff --git a/media/java/android/media/session/MediaSessionLegacyHelper.java b/media/java/android/media/session/MediaSessionLegacyHelper.java
index 7ea269b..c61d7ad 100644
--- a/media/java/android/media/session/MediaSessionLegacyHelper.java
+++ b/media/java/android/media/session/MediaSessionLegacyHelper.java
@@ -30,12 +30,9 @@ import android.media.MediaMetadata;
import android.media.MediaMetadataEditor;
import android.media.MediaMetadataRetriever;
import android.media.Rating;
-import android.media.RemoteControlClient;
-import android.media.RemoteControlClient.MetadataEditor;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
-import android.os.RemoteException;
import android.util.ArrayMap;
import android.util.Log;
import android.view.KeyEvent;
@@ -200,17 +197,17 @@ public class MediaSessionLegacyHelper {
break;
}
if (down || up) {
- int flags;
+ int flags = AudioManager.FLAG_FROM_KEY;
if (musicOnly) {
// This flag is used when the screen is off to only affect
// active media
- flags = AudioManager.FLAG_ACTIVE_MEDIA_ONLY;
+ flags |= AudioManager.FLAG_ACTIVE_MEDIA_ONLY;
} else {
// These flags are consistent with the home screen
if (up) {
- flags = AudioManager.FLAG_PLAY_SOUND | AudioManager.FLAG_VIBRATE;
+ flags |= AudioManager.FLAG_PLAY_SOUND | AudioManager.FLAG_VIBRATE;
} else {
- flags = AudioManager.FLAG_SHOW_UI | AudioManager.FLAG_VIBRATE;
+ flags |= AudioManager.FLAG_SHOW_UI | AudioManager.FLAG_VIBRATE;
}
}
if (direction != 0) {
@@ -221,14 +218,10 @@ public class MediaSessionLegacyHelper {
mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
direction, flags);
} else if (isMute) {
- if (down) {
- // We need to send two volume events on down, one to mute
- // and one to show the UI
+ if (down && keyEvent.getRepeatCount() == 0) {
mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
- MediaSessionManager.DIRECTION_MUTE, flags);
+ AudioManager.ADJUST_TOGGLE_MUTE, flags);
}
- mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
- 0 /* direction, causes UI to show on down */, flags);
}
}
}
diff --git a/media/java/android/media/session/MediaSessionManager.java b/media/java/android/media/session/MediaSessionManager.java
index a4ef851..6ac0efb 100644
--- a/media/java/android/media/session/MediaSessionManager.java
+++ b/media/java/android/media/session/MediaSessionManager.java
@@ -29,7 +29,6 @@ import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.UserHandle;
import android.service.notification.NotificationListenerService;
-import android.text.TextUtils;
import android.util.ArrayMap;
import android.util.Log;
import android.view.KeyEvent;
@@ -59,14 +58,6 @@ public final class MediaSessionManager {
private Context mContext;
/**
- * Special flag for sending the mute key to dispatchAdjustVolume used by the
- * system.
- *
- * @hide
- */
- public static final int DIRECTION_MUTE = -99;
-
- /**
* @hide
*/
public MediaSessionManager(Context context) {
diff --git a/media/java/android/media/session/PlaybackState.java b/media/java/android/media/session/PlaybackState.java
index 54d0acd..bbe04b5 100644
--- a/media/java/android/media/session/PlaybackState.java
+++ b/media/java/android/media/session/PlaybackState.java
@@ -23,8 +23,6 @@ import android.os.Parcel;
import android.os.Parcelable;
import android.os.SystemClock;
import android.text.TextUtils;
-import android.util.Log;
-
import java.util.ArrayList;
import java.util.List;
@@ -128,6 +126,13 @@ public final class PlaybackState implements Parcelable {
public static final long ACTION_SKIP_TO_QUEUE_ITEM = 1 << 12;
/**
+ * Indicates this session supports the play from URI command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY_FROM_URI = 1 << 13;
+
+ /**
* This is the default playback state and indicates that no media has been
* added yet, or the performer has been reset and has no content to play.
*
@@ -355,6 +360,11 @@ public final class PlaybackState implements Parcelable {
* <li> {@link PlaybackState#ACTION_SKIP_TO_NEXT}</li>
* <li> {@link PlaybackState#ACTION_SEEK_TO}</li>
* <li> {@link PlaybackState#ACTION_SET_RATING}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_PAUSE}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_MEDIA_ID}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_SEARCH}</li>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_QUEUE_ITEM}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_URI}</li>
* </ul>
*/
public long getActions() {
@@ -870,6 +880,11 @@ public final class PlaybackState implements Parcelable {
* <li> {@link PlaybackState#ACTION_SKIP_TO_NEXT}</li>
* <li> {@link PlaybackState#ACTION_SEEK_TO}</li>
* <li> {@link PlaybackState#ACTION_SET_RATING}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_PAUSE}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_MEDIA_ID}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_SEARCH}</li>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_QUEUE_ITEM}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_URI}</li>
* </ul>
*
* @param actions The set of actions allowed.
diff --git a/media/java/android/media/tv/DvbDeviceInfo.aidl b/media/java/android/media/tv/DvbDeviceInfo.aidl
new file mode 100644
index 0000000..4851050
--- /dev/null
+++ b/media/java/android/media/tv/DvbDeviceInfo.aidl
@@ -0,0 +1,20 @@
+/*
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+parcelable DvbDeviceInfo; \ No newline at end of file
diff --git a/media/java/android/media/tv/DvbDeviceInfo.java b/media/java/android/media/tv/DvbDeviceInfo.java
new file mode 100644
index 0000000..1885a34
--- /dev/null
+++ b/media/java/android/media/tv/DvbDeviceInfo.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.SystemApi;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.media.tv.TvInputManager;
+import android.util.Log;
+
+import java.lang.IllegalArgumentException;
+
+/**
+ * Simple container for information about DVB device.
+ * Not for third-party developers.
+ *
+ * @hide
+ */
+public final class DvbDeviceInfo implements Parcelable {
+ static final String TAG = "DvbDeviceInfo";
+
+ public static final Parcelable.Creator<DvbDeviceInfo> CREATOR =
+ new Parcelable.Creator<DvbDeviceInfo>() {
+ @Override
+ public DvbDeviceInfo createFromParcel(Parcel source) {
+ try {
+ return new DvbDeviceInfo(source);
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating DvbDeviceInfo from parcel", e);
+ return null;
+ }
+ }
+
+ @Override
+ public DvbDeviceInfo[] newArray(int size) {
+ return new DvbDeviceInfo[size];
+ }
+ };
+
+ private final int mAdapterId;
+ private final int mDeviceId;
+
+ private DvbDeviceInfo(Parcel source) {
+ mAdapterId = source.readInt();
+ mDeviceId = source.readInt();
+ }
+
+ /**
+ * Constructs a new {@link DvbDeviceInfo} with the given adapter ID and device ID.
+ */
+ public DvbDeviceInfo(int adapterId, int deviceId) {
+ mAdapterId = adapterId;
+ mDeviceId = deviceId;
+ }
+
+ /**
+ * Returns the adapter ID of DVB device, in terms of enumerating the DVB device adapters
+ * installed in the system. The adapter ID counts from zero.
+ */
+ public int getAdapterId() {
+ return mAdapterId;
+ }
+
+ /**
+ * Returns the device ID of DVB device, in terms of enumerating the DVB devices attached to
+ * the same device adapter. The device ID counts from zero.
+ */
+ public int getDeviceId() {
+ return mDeviceId;
+ }
+
+ // Parcelable
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mAdapterId);
+ dest.writeInt(mDeviceId);
+ }
+}
diff --git a/media/java/android/media/tv/ITvInputClient.aidl b/media/java/android/media/tv/ITvInputClient.aidl
index 7a023d6..86c0e5d 100644
--- a/media/java/android/media/tv/ITvInputClient.aidl
+++ b/media/java/android/media/tv/ITvInputClient.aidl
@@ -40,4 +40,7 @@ oneway interface ITvInputClient {
void onContentAllowed(int seq);
void onContentBlocked(in String rating, int seq);
void onLayoutSurface(int left, int top, int right, int bottom, int seq);
+ void onTimeShiftStatusChanged(int status, int seq);
+ void onTimeShiftStartPositionChanged(long timeMs, int seq);
+ void onTimeShiftCurrentPositionChanged(long timeMs, int seq);
}
diff --git a/media/java/android/media/tv/ITvInputManager.aidl b/media/java/android/media/tv/ITvInputManager.aidl
index 21549c9..f8057db 100644
--- a/media/java/android/media/tv/ITvInputManager.aidl
+++ b/media/java/android/media/tv/ITvInputManager.aidl
@@ -18,6 +18,8 @@ package android.media.tv;
import android.content.ComponentName;
import android.graphics.Rect;
+import android.media.PlaybackParams;
+import android.media.tv.DvbDeviceInfo;
import android.media.tv.ITvInputClient;
import android.media.tv.ITvInputHardware;
import android.media.tv.ITvInputHardwareCallback;
@@ -29,6 +31,7 @@ import android.media.tv.TvStreamConfig;
import android.media.tv.TvTrackInfo;
import android.net.Uri;
import android.os.Bundle;
+import android.os.ParcelFileDescriptor;
import android.view.Surface;
/**
@@ -72,7 +75,13 @@ interface ITvInputManager {
void relayoutOverlayView(in IBinder sessionToken, in Rect frame, int userId);
void removeOverlayView(in IBinder sessionToken, int userId);
- void requestUnblockContent(in IBinder sessionToken, in String unblockedRating, int userId);
+ void unblockContent(in IBinder sessionToken, in String unblockedRating, int userId);
+
+ void timeShiftPause(in IBinder sessionToken, int userId);
+ void timeShiftResume(in IBinder sessionToken, int userId);
+ void timeShiftSeekTo(in IBinder sessionToken, long timeMs, int userId);
+ void timeShiftSetPlaybackParams(in IBinder sessionToken, in PlaybackParams params, int userId);
+ void timeShiftEnablePositionTracking(in IBinder sessionToken, boolean enable, int userId);
// For TV input hardware binding
List<TvInputHardwareInfo> getHardwareList();
@@ -85,4 +94,8 @@ interface ITvInputManager {
boolean captureFrame(in String inputId, in Surface surface, in TvStreamConfig config,
int userId);
boolean isSingleSessionActive(int userId);
+
+ // For DVB device binding
+ List<DvbDeviceInfo> getDvbDeviceList();
+ ParcelFileDescriptor openDvbDevice(in DvbDeviceInfo info, int device);
}
diff --git a/media/java/android/media/tv/ITvInputSession.aidl b/media/java/android/media/tv/ITvInputSession.aidl
index 1aad2fa..6a06b8f 100644
--- a/media/java/android/media/tv/ITvInputSession.aidl
+++ b/media/java/android/media/tv/ITvInputSession.aidl
@@ -17,6 +17,7 @@
package android.media.tv;
import android.graphics.Rect;
+import android.media.PlaybackParams;
import android.media.tv.TvTrackInfo;
import android.net.Uri;
import android.os.Bundle;
@@ -45,5 +46,11 @@ oneway interface ITvInputSession {
void relayoutOverlayView(in Rect frame);
void removeOverlayView();
- void requestUnblockContent(in String unblockedRating);
+ void unblockContent(in String unblockedRating);
+
+ void timeShiftPause();
+ void timeShiftResume();
+ void timeShiftSeekTo(long timeMs);
+ void timeShiftSetPlaybackParams(in PlaybackParams params);
+ void timeShiftEnablePositionTracking(boolean enable);
}
diff --git a/media/java/android/media/tv/ITvInputSessionCallback.aidl b/media/java/android/media/tv/ITvInputSessionCallback.aidl
index 063d10d..e936810 100644
--- a/media/java/android/media/tv/ITvInputSessionCallback.aidl
+++ b/media/java/android/media/tv/ITvInputSessionCallback.aidl
@@ -37,4 +37,7 @@ oneway interface ITvInputSessionCallback {
void onContentAllowed();
void onContentBlocked(in String rating);
void onLayoutSurface(int left, int top, int right, int bottom);
+ void onTimeShiftStatusChanged(int status);
+ void onTimeShiftStartPositionChanged(long timeMs);
+ void onTimeShiftCurrentPositionChanged(long timeMs);
}
diff --git a/media/java/android/media/tv/ITvInputSessionWrapper.java b/media/java/android/media/tv/ITvInputSessionWrapper.java
index 94c9690..fed0ddf 100644
--- a/media/java/android/media/tv/ITvInputSessionWrapper.java
+++ b/media/java/android/media/tv/ITvInputSessionWrapper.java
@@ -18,6 +18,7 @@ package android.media.tv;
import android.content.Context;
import android.graphics.Rect;
+import android.media.PlaybackParams;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
@@ -41,8 +42,9 @@ import com.android.internal.os.SomeArgs;
public class ITvInputSessionWrapper extends ITvInputSession.Stub implements HandlerCaller.Callback {
private static final String TAG = "TvInputSessionWrapper";
- private static final int MESSAGE_HANDLING_DURATION_THRESHOLD_MILLIS = 50;
- private static final int MESSAGE_TUNE_DURATION_THRESHOLD_MILLIS = 2000;
+ private static final int EXECUTE_MESSAGE_TIMEOUT_SHORT_MILLIS = 50;
+ private static final int EXECUTE_MESSAGE_TUNE_TIMEOUT_MILLIS = 2000;
+ private static final int EXECUTE_MESSAGE_TIMEOUT_LONG_MILLIS = 5 * 1000;
private static final int DO_RELEASE = 1;
private static final int DO_SET_MAIN = 2;
@@ -56,7 +58,12 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
private static final int DO_CREATE_OVERLAY_VIEW = 10;
private static final int DO_RELAYOUT_OVERLAY_VIEW = 11;
private static final int DO_REMOVE_OVERLAY_VIEW = 12;
- private static final int DO_REQUEST_UNBLOCK_CONTENT = 13;
+ private static final int DO_UNBLOCK_CONTENT = 13;
+ private static final int DO_TIME_SHIFT_PAUSE = 14;
+ private static final int DO_TIME_SHIFT_RESUME = 15;
+ private static final int DO_TIME_SHIFT_SEEK_TO = 16;
+ private static final int DO_TIME_SHIFT_SET_PLAYBACK_PARAMS = 17;
+ private static final int DO_TIME_SHIFT_ENABLE_POSITION_TRACKING = 18;
private final HandlerCaller mCaller;
@@ -149,24 +156,48 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
mTvInputSessionImpl.removeOverlayView(true);
break;
}
- case DO_REQUEST_UNBLOCK_CONTENT: {
+ case DO_UNBLOCK_CONTENT: {
mTvInputSessionImpl.unblockContent((String) msg.obj);
break;
}
+ case DO_TIME_SHIFT_PAUSE: {
+ mTvInputSessionImpl.timeShiftPause();
+ break;
+ }
+ case DO_TIME_SHIFT_RESUME: {
+ mTvInputSessionImpl.timeShiftResume();
+ break;
+ }
+ case DO_TIME_SHIFT_SEEK_TO: {
+ mTvInputSessionImpl.timeShiftSeekTo((Long) msg.obj);
+ break;
+ }
+ case DO_TIME_SHIFT_SET_PLAYBACK_PARAMS: {
+ mTvInputSessionImpl.timeShiftSetPlaybackParams((PlaybackParams) msg.obj);
+ break;
+ }
+ case DO_TIME_SHIFT_ENABLE_POSITION_TRACKING: {
+ mTvInputSessionImpl.timeShiftEnablePositionTracking((Boolean) msg.obj);
+ break;
+ }
default: {
Log.w(TAG, "Unhandled message code: " + msg.what);
break;
}
}
long duration = System.currentTimeMillis() - startTime;
- if (duration > MESSAGE_HANDLING_DURATION_THRESHOLD_MILLIS) {
+ if (duration > EXECUTE_MESSAGE_TIMEOUT_SHORT_MILLIS) {
Log.w(TAG, "Handling message (" + msg.what + ") took too long time (duration="
+ duration + "ms)");
- if (msg.what == DO_TUNE && duration > MESSAGE_TUNE_DURATION_THRESHOLD_MILLIS) {
+ if (msg.what == DO_TUNE && duration > EXECUTE_MESSAGE_TUNE_TIMEOUT_MILLIS) {
throw new RuntimeException("Too much time to handle tune request. (" + duration
- + "ms > " + MESSAGE_TUNE_DURATION_THRESHOLD_MILLIS + "ms) "
+ + "ms > " + EXECUTE_MESSAGE_TUNE_TIMEOUT_MILLIS + "ms) "
+ "Consider handling the tune request in a separate thread.");
}
+ if (duration > EXECUTE_MESSAGE_TIMEOUT_LONG_MILLIS) {
+ throw new RuntimeException("Too much time to handle a request. (type=" + msg.what +
+ ", " + duration + "ms > " + EXECUTE_MESSAGE_TIMEOUT_LONG_MILLIS + "ms).");
+ }
}
}
@@ -237,9 +268,37 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
}
@Override
- public void requestUnblockContent(String unblockedRating) {
+ public void unblockContent(String unblockedRating) {
+ mCaller.executeOrSendMessage(mCaller.obtainMessageO(
+ DO_UNBLOCK_CONTENT, unblockedRating));
+ }
+
+ @Override
+ public void timeShiftPause() {
+ mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_TIME_SHIFT_PAUSE));
+ }
+
+ @Override
+ public void timeShiftResume() {
+ mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_TIME_SHIFT_RESUME));
+ }
+
+ @Override
+ public void timeShiftSeekTo(long timeMs) {
+ mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_TIME_SHIFT_SEEK_TO,
+ Long.valueOf(timeMs)));
+ }
+
+ @Override
+ public void timeShiftSetPlaybackParams(PlaybackParams params) {
+ mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_TIME_SHIFT_SET_PLAYBACK_PARAMS,
+ params));
+ }
+
+ @Override
+ public void timeShiftEnablePositionTracking(boolean enable) {
mCaller.executeOrSendMessage(mCaller.obtainMessageO(
- DO_REQUEST_UNBLOCK_CONTENT, unblockedRating));
+ DO_TIME_SHIFT_ENABLE_POSITION_TRACKING, Boolean.valueOf(enable)));
}
private final class TvInputEventReceiver extends InputEventReceiver {
diff --git a/media/java/android/media/tv/TvContentRating.java b/media/java/android/media/tv/TvContentRating.java
index c539290..043b80e 100644
--- a/media/java/android/media/tv/TvContentRating.java
+++ b/media/java/android/media/tv/TvContentRating.java
@@ -16,9 +16,12 @@
package android.media.tv;
+import android.annotation.NonNull;
import android.annotation.SystemApi;
import android.text.TextUtils;
+import com.android.internal.util.Preconditions;
+
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -32,11 +35,11 @@ import java.util.Objects;
* To create a {@code TvContentRating} object, use the
* {@link #createRating TvContentRating.createRating} method with valid rating system string
* constants.
- * <p>
- * It is possible for an application to define its own content rating system by supplying a content
- * rating system definition XML resource (see example below) and declaring a broadcast receiver that
- * filters {@link TvInputManager#ACTION_QUERY_CONTENT_RATING_SYSTEMS} in its manifest.
- * </p>
+ *
+ * <p>It is possible for an application to define its own content rating system by supplying a
+ * content rating system definition XML resource (see example below) and declaring a broadcast
+ * receiver that filters {@link TvInputManager#ACTION_QUERY_CONTENT_RATING_SYSTEMS} in its manifest.
+ *
* <h3> Example: Rating system definition for the TV Parental Guidelines</h3>
* The following XML example shows how the TV Parental Guidelines in the United States can be
* defined:
@@ -120,15 +123,16 @@ import java.util.Objects;
* <rating android:name="US_TV_MA" />
* </rating-order>
* </rating-system-definition>
- * </rating-system-definitions>}</pre></p>
+ * </rating-system-definitions>}</pre>
*
* <h3>System defined rating strings</h3>
* The following strings are defined by the system to provide a standard way to create
* {@code TvContentRating} objects.
+ *
* <p>For example, to create an object that represents TV-PG rating with suggestive dialogue and
* coarse language from the TV Parental Guidelines in the United States, one can use the following
* code snippet:
- * </p>
+ *
* <pre>
* TvContentRating rating = TvContentRating.createRating(
* "com.android.tv",
@@ -167,6 +171,14 @@ import java.util.Objects;
* <td>TV content rating system for Brazil</td>
* </tr>
* <tr>
+ * <td>CA_TV_EN</td>
+ * <td>TV content rating system for Canada (English)</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR</td>
+ * <td>TV content rating system for Canada (French)</td>
+ * </tr>
+ * <tr>
* <td>DVB</td>
* <td>DVB content rating system</td>
* </tr>
@@ -191,6 +203,10 @@ import java.util.Objects;
* <td>TV content rating system for Singapore</td>
* </tr>
* <tr>
+ * <td>US_MV</td>
+ * <td>Movie content rating system for the United States</td>
+ * </tr>
+ * <tr>
* <td>US_TV</td>
* <td>TV content rating system for the United States</td>
* </tr>
@@ -286,6 +302,60 @@ import java.util.Objects;
* <td>Content suitable for viewers over the age of 18</td>
* </tr>
* <tr>
+ * <td valign="top" rowspan="7">CA_TV_EN</td>
+ * <td>CA_TV_EN_EXEMPT</td>
+ * <td>Exempt from ratings</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_C</td>
+ * <td>Suitable for children ages 2&#8211;7</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_C8</td>
+ * <td>Suitable for children ages 8 and older</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_G</td>
+ * <td>Suitable for the entire family</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_PG</td>
+ * <td>May contain moderate violence, profanity, nudity, and sexual references</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_14</td>
+ * <td>Intended for viewers ages 14 and older</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_18</td>
+ * <td>Intended for viewers ages 18 and older</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="6">CA_TV_FR</td>
+ * <td>CA_TV_FR_E</td>
+ * <td>Exempt from ratings</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_G</td>
+ * <td>Appropriate for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_8</td>
+ * <td>Appropriate for children 8</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_13</td>
+ * <td>Suitable for children 13</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_16</td>
+ * <td>Recommended for children over the age of 16</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_18</td>
+ * <td>Only to be viewed by adults</td>
+ * </tr>
+ * <tr>
* <td valign="top" rowspan="15">DVB</td>
* <td>DVB_4</td>
* <td>Recommended for ages 4 and over</td>
@@ -604,6 +674,27 @@ import java.util.Objects;
* <td>Suitable for adults aged 21 and above</td>
* </tr>
* <tr>
+ * <td valign="top" rowspan="5">US_MV</td>
+ * <td>US_MV_G</td>
+ * <td>General audiences</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_PG</td>
+ * <td>Parental guidance suggested</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_PG13</td>
+ * <td>Parents strongly cautioned</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_R</td>
+ * <td>Restricted, under 17 requires accompanying parent or adult guardian</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_NC17</td>
+ * <td>No one 17 and under admitted</td>
+ * </tr>
+ * <tr>
* <td valign="top" rowspan="6">US_TV</td>
* <td>US_TV_Y</td>
* <td>This program is designed to be appropriate for all children</td>
@@ -692,6 +783,17 @@ public final class TvContentRating {
private final int mHashCode;
/**
+ * Rating constant denoting unrated content. Used to handle the case where the content rating
+ * information is missing.
+ *
+ * <p>TV input services can call {@link TvInputManager#isRatingBlocked} with this constant to
+ * determine whether they should block unrated content. The subsequent call to
+ * {@link TvInputService.Session#notifyContentBlocked} with the same constant notifies
+ * applications that the current program content is blocked by parental controls.
+ */
+ public static final TvContentRating UNRATED = new TvContentRating("null", "null", "null", null);
+
+ /**
* Creates a {@code TvContentRating} object with predefined content rating strings.
*
* @param domain The domain string. For example, "com.android.tv".
@@ -823,20 +925,17 @@ public final class TvContentRating {
/**
* Returns {@code true} if this rating has the same main rating as the specified rating and when
* this rating's sub-ratings contain the other's.
- * <p>
- * For example, a {@code TvContentRating} object that represents TV-PG with S(Sexual content)
- * and V(Violence) contains TV-PG, TV-PG/S, TV-PG/V and itself.
- * </p>
+ *
+ * <p>For example, a {@code TvContentRating} object that represents TV-PG with
+ * S(Sexual content) and V(Violence) contains TV-PG, TV-PG/S, TV-PG/V and itself.
*
* @param rating The {@link TvContentRating} to check.
* @return {@code true} if this object contains {@code rating}, {@code false} otherwise.
* @hide
*/
@SystemApi
- public final boolean contains(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public final boolean contains(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
if (!rating.getMainRating().equals(mRating)) {
return false;
}
diff --git a/media/java/android/media/tv/TvContract.java b/media/java/android/media/tv/TvContract.java
index 5b92266..f5a6f2b 100644
--- a/media/java/android/media/tv/TvContract.java
+++ b/media/java/android/media/tv/TvContract.java
@@ -16,6 +16,7 @@
package android.media.tv;
+import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.content.ComponentName;
import android.content.ContentResolver;
@@ -30,15 +31,13 @@ import java.util.List;
import java.util.Map;
/**
- * <p>
* The contract between the TV provider and applications. Contains definitions for the supported
* URIs and columns.
- * </p>
* <h3>Overview</h3>
- * <p>
- * TvContract defines a basic database of TV content metadata such as channel and program
+ *
+ * <p>TvContract defines a basic database of TV content metadata such as channel and program
* information. The information is stored in {@link Channels} and {@link Programs} tables.
- * </p>
+ *
* <ul>
* <li>A row in the {@link Channels} table represents information about a TV channel. The data
* format can vary greatly from standard to standard or according to service provider, thus
@@ -156,7 +155,7 @@ public final class TvContract {
* @param inputId The ID of the TV input to build a channels URI for. If {@code null}, builds a
* URI for all the TV inputs.
*/
- public static final Uri buildChannelsUriForInput(String inputId) {
+ public static final Uri buildChannelsUriForInput(@Nullable String inputId) {
return buildChannelsUriForInput(inputId, false);
}
@@ -171,7 +170,8 @@ public final class TvContract {
* @hide
*/
@SystemApi
- public static final Uri buildChannelsUriForInput(String inputId, boolean browsableOnly) {
+ public static final Uri buildChannelsUriForInput(@Nullable String inputId,
+ boolean browsableOnly) {
Uri.Builder builder = Channels.CONTENT_URI.buildUpon();
if (inputId != null) {
builder.appendQueryParameter(PARAM_INPUT, inputId);
@@ -193,8 +193,8 @@ public final class TvContract {
* @hide
*/
@SystemApi
- public static final Uri buildChannelsUriForInput(String inputId, String genre,
- boolean browsableOnly) {
+ public static final Uri buildChannelsUriForInput(@Nullable String inputId,
+ @Nullable String genre, boolean browsableOnly) {
if (genre == null) {
return buildChannelsUriForInput(inputId, browsableOnly);
}
@@ -333,13 +333,12 @@ public final class TvContract {
public interface BaseTvColumns extends BaseColumns {
/**
* The name of the package that owns a row in each table.
- * <p>
- * The TV provider fills it in with the name of the package that provides the initial data
+ *
+ * <p>The TV provider fills it in with the name of the package that provides the initial data
* of that row. If the package is later uninstalled, the rows it owns are automatically
* removed from the tables.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_PACKAGE_NAME = "package_name";
}
@@ -509,181 +508,171 @@ public final class TvContract {
* is not defined for the given video format.
* @see #COLUMN_VIDEO_FORMAT
*/
+ @Nullable
public static final String getVideoResolution(String videoFormat) {
return VIDEO_FORMAT_TO_RESOLUTION_MAP.get(videoFormat);
}
/**
* The ID of the TV input service that provides this TV channel.
- * <p>
- * Use {@link #buildInputId} to build the ID.
- * </p><p>
- * This is a required field.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Use {@link #buildInputId} to build the ID.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_INPUT_ID = "input_id";
/**
* The predefined type of this TV channel.
- * <p>
- * This is primarily used to indicate which broadcast standard (e.g. ATSC, DVB or ISDB) the
- * current channel conforms to. The value should match to one of the followings:
+ *
+ * <p>This is primarily used to indicate which broadcast standard (e.g. ATSC, DVB or ISDB)
+ * the current channel conforms to. The value should match to one of the followings:
* {@link #TYPE_OTHER}, {@link #TYPE_DVB_T}, {@link #TYPE_DVB_T2}, {@link #TYPE_DVB_S},
* {@link #TYPE_DVB_S2}, {@link #TYPE_DVB_C}, {@link #TYPE_DVB_C2}, {@link #TYPE_DVB_H},
* {@link #TYPE_DVB_SH}, {@link #TYPE_ATSC_T}, {@link #TYPE_ATSC_C},
* {@link #TYPE_ATSC_M_H}, {@link #TYPE_ISDB_T}, {@link #TYPE_ISDB_TB},
* {@link #TYPE_ISDB_S}, {@link #TYPE_ISDB_C}, {@link #TYPE_1SEG}, {@link #TYPE_DTMB},
* {@link #TYPE_CMMB}, {@link #TYPE_T_DMB}, {@link #TYPE_S_DMB}
- * </p><p>
- * This is a required field.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_TYPE = "type";
/**
* The predefined service type of this TV channel.
- * <p>
- * This is primarily used to indicate whether the current channel is a regular TV channel or
- * a radio-like channel. Use the same coding for {@code service_type} in the underlying
+ *
+ * <p>This is primarily used to indicate whether the current channel is a regular TV channel
+ * or a radio-like channel. Use the same coding for {@code service_type} in the underlying
* broadcast standard if it is defined there (e.g. ATSC A/53, ETSI EN 300 468 and ARIB
* STD-B10). Otherwise use one of the followings: {@link #SERVICE_TYPE_OTHER},
* {@link #SERVICE_TYPE_AUDIO_VIDEO}, {@link #SERVICE_TYPE_AUDIO}
- * </p><p>
- * This is a required field.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_SERVICE_TYPE = "service_type";
/**
* The original network ID of this TV channel.
- * <p>
- * This is used to identify the originating delivery system, if applicable. Use the same
+ *
+ * <p>This is used to identify the originating delivery system, if applicable. Use the same
* coding for {@code original_network_id} in the underlying broadcast standard if it is
* defined there (e.g. ETSI EN 300 468/TR 101 211 and ARIB STD-B10). If channels cannot be
* globally identified by 2-tuple {{@link #COLUMN_TRANSPORT_STREAM_ID},
* {@link #COLUMN_SERVICE_ID}}, one must carefully assign a value to this field to form a
* unique 3-tuple identification {{@link #COLUMN_ORIGINAL_NETWORK_ID},
* {@link #COLUMN_TRANSPORT_STREAM_ID}, {@link #COLUMN_SERVICE_ID}} for its channels.
- * </p><p>
- * This is a required field if the channel cannot be uniquely identified by a 2-tuple
+ *
+ * <p>This is a required field if the channel cannot be uniquely identified by a 2-tuple
* {{@link #COLUMN_TRANSPORT_STREAM_ID}, {@link #COLUMN_SERVICE_ID}}.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_ORIGINAL_NETWORK_ID = "original_network_id";
/**
* The transport stream ID of this channel.
- * <p>
- * This is used to identify the Transport Stream that contains the current channel from any
- * other multiplex within a network, if applicable. Use the same coding for
+ *
+ * <p>This is used to identify the Transport Stream that contains the current channel from
+ * any other multiplex within a network, if applicable. Use the same coding for
* {@code transport_stream_id} defined in ISO/IEC 13818-1 if the channel is transmitted via
* the MPEG Transport Stream as is the case for many digital broadcast standards.
- * </p><p>
- * This is a required field if the current channel is transmitted via the MPEG Transport
+ *
+ * <p>This is a required field if the current channel is transmitted via the MPEG Transport
* Stream.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_TRANSPORT_STREAM_ID = "transport_stream_id";
/**
* The service ID of this channel.
- * <p>
- * This is used to identify the current service (roughly equivalent to channel) from any
+ *
+ * <p>This is used to identify the current service (roughly equivalent to channel) from any
* other service within the Transport Stream, if applicable. Use the same coding for
* {@code service_id} in the underlying broadcast standard if it is defined there (e.g. ETSI
* EN 300 468 and ARIB STD-B10) or {@code program_number} (which usually has the same value
* as {@code service_id}) in ISO/IEC 13818-1 if the channel is transmitted via the MPEG
* Transport Stream.
- * </p><p>
- * This is a required field if the current channel is transmitted via the MPEG Transport
+ *
+ * <p>This is a required field if the current channel is transmitted via the MPEG Transport
* Stream.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_SERVICE_ID = "service_id";
/**
* The channel number that is displayed to the user.
- * <p>
- * The format can vary depending on broadcast standard and product specification.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>The format can vary depending on broadcast standard and product specification.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DISPLAY_NUMBER = "display_number";
/**
* The channel name that is displayed to the user.
- * <p>
- * A call sign is a good candidate to use for this purpose but any name that helps the user
- * recognize the current channel will be enough. Can also be empty depending on broadcast
- * standard.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>A call sign is a good candidate to use for this purpose but any name that helps the
+ * user recognize the current channel will be enough. Can also be empty depending on
+ * broadcast standard.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DISPLAY_NAME = "display_name";
/**
* The network affiliation for this TV channel.
- * <p>
- * This is used to identify a channel that is commonly called by its network affiliation
+ *
+ * <p>This is used to identify a channel that is commonly called by its network affiliation
* instead of the display name. Examples include ABC for the channel KGO-HD, FOX for the
* channel KTVU-HD and NBC for the channel KNTV-HD. Can be empty if not applicable.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_NETWORK_AFFILIATION = "network_affiliation";
/**
* The description of this TV channel.
- * <p>
- * Can be empty initially.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Can be empty initially.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DESCRIPTION = "description";
/**
* The typical video format for programs from this TV channel.
- * <p>
- * This is primarily used to filter out channels based on video format by applications. The
- * value should match one of the followings: {@link #VIDEO_FORMAT_240P},
+ *
+ * <p>This is primarily used to filter out channels based on video format by applications.
+ * The value should match one of the followings: {@link #VIDEO_FORMAT_240P},
* {@link #VIDEO_FORMAT_360P}, {@link #VIDEO_FORMAT_480I}, {@link #VIDEO_FORMAT_480P},
* {@link #VIDEO_FORMAT_576I}, {@link #VIDEO_FORMAT_576P}, {@link #VIDEO_FORMAT_720P},
* {@link #VIDEO_FORMAT_1080I}, {@link #VIDEO_FORMAT_1080P}, {@link #VIDEO_FORMAT_2160P},
* {@link #VIDEO_FORMAT_4320P}. Note that the actual video resolution of each program from a
* given channel can vary thus one should use {@link Programs#COLUMN_VIDEO_WIDTH} and
* {@link Programs#COLUMN_VIDEO_HEIGHT} to get more accurate video resolution.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
+ *
* @see #getVideoResolution
*/
public static final String COLUMN_VIDEO_FORMAT = "video_format";
/**
* The flag indicating whether this TV channel is browsable or not.
- * <p>
- * A value of 1 indicates the channel is included in the channel list that applications use
- * to browse channels, a value of 0 indicates the channel is not included in the list. If
- * not specified, this value is set to 0 (not browsable) by default.
- * </p><p>
- * Type: INTEGER (boolean)
- * </p>
+ *
+ * <p>A value of 1 indicates the channel is included in the channel list that applications
+ * use to browse channels, a value of 0 indicates the channel is not included in the list.
+ * If not specified, this value is set to 0 (not browsable) by default.
+ *
+ * <p>Type: INTEGER (boolean)
* @hide
*/
@SystemApi
@@ -691,31 +680,29 @@ public final class TvContract {
/**
* The flag indicating whether this TV channel is searchable or not.
- * <p>
- * In some regions, it is not allowed to surface search results for a given channel without
- * broadcaster's consent. This is used to impose such restriction. Channels marked with
- * "not searchable" cannot be used by other services except for the system service that
+ *
+ * <p>In some regions, it is not allowed to surface search results for a given channel
+ * without broadcaster's consent. This is used to impose such restriction. Channels marked
+ * with "not searchable" cannot be used by other services except for the system service that
* shows the TV content. A value of 1 indicates the channel is searchable and can be
* included in search results, a value of 0 indicates the channel and its TV programs are
* hidden from search. If not specified, this value is set to 1 (searchable) by default.
- * </p><p>
- * Type: INTEGER (boolean)
- * </p>
+ *
+ * <p>Type: INTEGER (boolean)
*/
public static final String COLUMN_SEARCHABLE = "searchable";
/**
* The flag indicating whether this TV channel is locked or not.
- * <p>
- * This is primarily used for alternative parental control to prevent unauthorized users
+ *
+ * <p>This is primarily used for alternative parental control to prevent unauthorized users
* from watching the current channel regardless of the content rating. A value of 1
* indicates the channel is locked and the user is required to enter passcode to unlock it
* in order to watch the current program from the channel, a value of 0 indicates the
* channel is not locked thus the user is not prompted to enter passcode. If not specified,
* this value is set to 0 (not locked) by default.
- * </p><p>
- * Type: INTEGER (boolean)
- * </p>
+ *
+ * <p>Type: INTEGER (boolean)
* @hide
*/
@SystemApi
@@ -723,25 +710,63 @@ public final class TvContract {
/**
* Internal data used by individual TV input services.
- * <p>
- * This is internal to the provider that inserted it, and should not be decoded by other
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
* apps.
- * </p><p>
- * Type: BLOB
- * </p>
+ *
+ * <p>Type: BLOB
*/
public static final String COLUMN_INTERNAL_PROVIDER_DATA = "internal_provider_data";
/**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG1 = "internal_provider_flag1";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG2 = "internal_provider_flag2";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG3 = "internal_provider_flag3";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG4 = "internal_provider_flag4";
+
+ /**
* The version number of this row entry used by TV input services.
- * <p>
- * This is best used by sync adapters to identify the rows to update. The number can be
+ *
+ * <p>This is best used by sync adapters to identify the rows to update. The number can be
* defined by individual TV input services. One may assign the same value as
* {@code version_number} that appears in ETSI EN 300 468 or ATSC A/65, if the data are
* coming from a TV broadcast.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VERSION_NUMBER = "version_number";
@@ -749,17 +774,18 @@ public final class TvContract {
/**
* A sub-directory of a single TV channel that represents its primary logo.
- * <p>
- * To access this directory, append {@link Channels.Logo#CONTENT_DIRECTORY} to the raw
+ *
+ * <p>To access this directory, append {@link Channels.Logo#CONTENT_DIRECTORY} to the raw
* channel URI. The resulting URI represents an image file, and should be interacted
* using ContentResolver.openAssetFileDescriptor.
- * </p><p>
- * Note that this sub-directory also supports opening the logo as an asset file in write
+ *
+ * <p>Note that this sub-directory also supports opening the logo as an asset file in write
* mode. Callers can create or replace the primary logo associated with this channel by
- * opening the asset file and writing the full-size photo contents into it. When the file
- * is closed, the image will be parsed, sized down if necessary, and stored.
- * </p><p>
- * Usage example:
+ * opening the asset file and writing the full-size photo contents into it. (Make sure there
+ * is no padding around the logo image.) When the file is closed, the image will be parsed,
+ * sized down if necessary, and stored.
+ *
+ * <p>Usage example:
* <pre>
* public void writeChannelLogo(long channelId, byte[] logo) {
* Uri channelLogoUri = TvContract.buildChannelLogoUri(channelId);
@@ -775,7 +801,6 @@ public final class TvContract {
* }
* }
* </pre>
- * </p>
*/
public static final class Logo {
@@ -788,7 +813,12 @@ public final class TvContract {
}
}
- /** Column definitions for the TV programs table. */
+ /**
+ * Column definitions for the TV programs table.
+ *
+ * <p>By default, the query results will be sorted by
+ * {@link Programs#COLUMN_START_TIME_UTC_MILLIS} in ascending order.
+ */
public static final class Programs implements BaseTvColumns {
/** The content:// style URI for this table. */
@@ -803,166 +833,153 @@ public final class TvContract {
/**
* The ID of the TV channel that provides this TV program.
- * <p>
- * This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
- * </p><p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_CHANNEL_ID = "channel_id";
/**
* The title of this TV program.
- * <p>
- * If this program is an episodic TV show, it is recommended that the title is the series
+ *
+ * <p>If this program is an episodic TV show, it is recommended that the title is the series
* title and its related fields ({@link #COLUMN_SEASON_NUMBER},
* {@link #COLUMN_EPISODE_NUMBER}, and {@link #COLUMN_EPISODE_TITLE}) are filled in.
- * </p><p>
- * Type: TEXT
- * </p>
- **/
+ *
+ * <p>Type: TEXT
+ */
public static final String COLUMN_TITLE = "title";
/**
* The season number of this TV program for episodic TV shows.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: INTEGER
- * </p>
- **/
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
public static final String COLUMN_SEASON_NUMBER = "season_number";
/**
* The episode number of this TV program for episodic TV shows.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: INTEGER
- * </p>
- **/
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
public static final String COLUMN_EPISODE_NUMBER = "episode_number";
/**
* The episode title of this TV program for episodic TV shows.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: TEXT
- * </p>
- **/
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
public static final String COLUMN_EPISODE_TITLE = "episode_title";
/**
* The start time of this TV program, in milliseconds since the epoch.
- * <p>
- * The value should be equal to or larger than {@link #COLUMN_END_TIME_UTC_MILLIS} of the
+ *
+ * <p>The value should be equal to or larger than {@link #COLUMN_END_TIME_UTC_MILLIS} of the
* previous program in the same channel.
- * </p><p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
/**
* The end time of this TV program, in milliseconds since the epoch.
- * <p>
- * The value should be equal to or less than {@link #COLUMN_START_TIME_UTC_MILLIS} of the
+ *
+ * <p>The value should be equal to or less than {@link #COLUMN_START_TIME_UTC_MILLIS} of the
* next program in the same channel.
- * </p><p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
/**
* The comma-separated genre string of this TV program.
- * <p>
- * Use the same language appeared in the underlying broadcast standard, if applicable. (For
- * example, one can refer to the genre strings used in Genre Descriptor of ATSC A/65 or
+ *
 +             * <p>Use the same language that appears in the underlying broadcast standard,
+ * (For example, one can refer to the genre strings used in Genre Descriptor of ATSC A/65 or
* Content Descriptor of ETSI EN 300 468, if appropriate.) Otherwise, leave empty.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_BROADCAST_GENRE = "broadcast_genre";
/**
* The comma-separated canonical genre string of this TV program.
- * <p>
- * Canonical genres are defined in {@link Genres}. Use {@link Genres#encode Genres.encode()}
- * to create a text that can be stored in this column. Use {@link Genres#decode
- * Genres.decode()} to get the canonical genre strings from the text stored in this column.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Canonical genres are defined in {@link Genres}. Use
+ * {@link Genres#encode Genres.encode()} to create a text that can be stored in this column.
+ * Use {@link Genres#decode Genres.decode()} to get the canonical genre strings from the
+ * text stored in this column.
+ *
+ * <p>Type: TEXT
* @see Genres
*/
public static final String COLUMN_CANONICAL_GENRE = "canonical_genre";
/**
* The short description of this TV program that is displayed to the user by default.
- * <p>
- * It is recommended to limit the length of the descriptions to 256 characters.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>It is recommended to limit the length of the descriptions to 256 characters.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_SHORT_DESCRIPTION = "short_description";
/**
* The detailed, lengthy description of this TV program that is displayed only when the user
* wants to see more information.
- * <p>
- * TV input services should leave this field empty if they have no additional details beyond
- * {@link #COLUMN_SHORT_DESCRIPTION}.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>TV input services should leave this field empty if they have no additional details
+ * beyond {@link #COLUMN_SHORT_DESCRIPTION}.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_LONG_DESCRIPTION = "long_description";
/**
* The width of the video for this TV program, in the unit of pixels.
- * <p>
- * Together with {@link #COLUMN_VIDEO_HEIGHT} this is used to determine the video resolution
- * of the current TV program. Can be empty if it is not known initially or the program does
- * not convey any video such as the programs from type {@link Channels#SERVICE_TYPE_AUDIO}
- * channels.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Together with {@link #COLUMN_VIDEO_HEIGHT} this is used to determine the video
+ * resolution of the current TV program. Can be empty if it is not known initially or the
+ * program does not convey any video such as the programs from type
+ * {@link Channels#SERVICE_TYPE_AUDIO} channels.
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VIDEO_WIDTH = "video_width";
/**
* The height of the video for this TV program, in the unit of pixels.
- * <p>
- * Together with {@link #COLUMN_VIDEO_WIDTH} this is used to determine the video resolution
- * of the current TV program. Can be empty if it is not known initially or the program does
- * not convey any video such as the programs from type {@link Channels#SERVICE_TYPE_AUDIO}
- * channels.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Together with {@link #COLUMN_VIDEO_WIDTH} this is used to determine the video
+ * resolution of the current TV program. Can be empty if it is not known initially or the
+ * program does not convey any video such as the programs from type
+ * {@link Channels#SERVICE_TYPE_AUDIO} channels.
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VIDEO_HEIGHT = "video_height";
/**
* The comma-separated audio languages of this TV program.
- * <p>
- * This is used to describe available audio languages included in the program. Use either
+ *
+ * <p>This is used to describe available audio languages included in the program. Use either
* ISO 639-1 or 639-2/T codes.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_AUDIO_LANGUAGE = "audio_language";
/**
* The comma-separated content ratings of this TV program.
- * <p>
- * This is used to describe the content rating(s) of this program. Each comma-separated
+ *
+ * <p>This is used to describe the content rating(s) of this program. Each comma-separated
* content rating sub-string should be generated by calling
* {@link TvContentRating#flattenToString}. Note that in most cases the program content is
* rated by a single rating system, thus resulting in a corresponding single sub-string that
@@ -971,53 +988,88 @@ public final class TvContract {
* specified as "blocked rating" in the user's parental control settings, the TV input
* service should block the current content and wait for the signal that it is okay to
* unblock.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_CONTENT_RATING = "content_rating";
/**
* The URI for the poster art of this TV program.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_POSTER_ART_URI = "poster_art_uri";
/**
* The URI for the thumbnail of this TV program.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_THUMBNAIL_URI = "thumbnail_uri";
/**
* Internal data used by individual TV input services.
- * <p>
- * This is internal to the provider that inserted it, and should not be decoded by other
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
* apps.
- * </p><p>
- * Type: BLOB
- * </p>
+ *
+ * <p>Type: BLOB
*/
public static final String COLUMN_INTERNAL_PROVIDER_DATA = "internal_provider_data";
/**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG1 = "internal_provider_flag1";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG2 = "internal_provider_flag2";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG3 = "internal_provider_flag3";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG4 = "internal_provider_flag4";
+
+ /**
* The version number of this row entry used by TV input services.
- * <p>
- * This is best used by sync adapters to identify the rows to update. The number can be
+ *
+ * <p>This is best used by sync adapters to identify the rows to update. The number can be
* defined by individual TV input services. One may assign the same value as
* {@code version_number} in ETSI EN 300 468 or ATSC A/65, if the data are coming from a TV
* broadcast.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VERSION_NUMBER = "version_number";
@@ -1144,6 +1196,9 @@ public final class TvContract {
/**
* Column definitions for the TV programs that the user watched. Applications do not have access
* to this table.
+ *
+ * <p>By default, the query results will be sorted by
+ * {@link WatchedPrograms#COLUMN_WATCH_START_TIME_UTC_MILLIS} in descending order.
* @hide
*/
@SystemApi
@@ -1162,9 +1217,8 @@ public final class TvContract {
/**
* The UTC time that the user started watching this TV program, in milliseconds since the
* epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_WATCH_START_TIME_UTC_MILLIS =
"watch_start_time_utc_millis";
@@ -1172,49 +1226,43 @@ public final class TvContract {
/**
* The UTC time that the user stopped watching this TV program, in milliseconds since the
* epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_WATCH_END_TIME_UTC_MILLIS = "watch_end_time_utc_millis";
/**
* The ID of the TV channel that provides this TV program.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_CHANNEL_ID = "channel_id";
/**
* The title of this TV program.
- * <p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_TITLE = "title";
/**
* The start time of this TV program, in milliseconds since the epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
/**
* The end time of this TV program, in milliseconds since the epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
/**
* The description of this TV program.
- * <p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DESCRIPTION = "description";
@@ -1222,25 +1270,23 @@ public final class TvContract {
* Extra parameters given to {@link TvInputService.Session#tune(Uri, android.os.Bundle)
* TvInputService.Session.tune(Uri, android.os.Bundle)} when tuning to the channel that
* provides this TV program. (Used internally.)
- * <p>
- * This column contains an encoded string that represents comma-separated key-value pairs of
+ *
+ * <p>This column contains an encoded string that represents comma-separated key-value pairs of
* the tune parameters. (Ex. "[key1]=[value1], [key2]=[value2]"). '%' is used as an escape
* character for '%', '=', and ','.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_INTERNAL_TUNE_PARAMS = "tune_params";
/**
* The session token of this TV program. (Used internally.)
- * <p>
- * This contains a String representation of {@link IBinder} for
+ *
+ * <p>This contains a String representation of {@link IBinder} for
* {@link TvInputService.Session} that provides the current TV program. It is used
* internally to distinguish watched programs entries from different TV input sessions.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_INTERNAL_SESSION_TOKEN = "session_token";
diff --git a/media/java/android/media/tv/TvInputInfo.java b/media/java/android/media/tv/TvInputInfo.java
index b9e99d2..46d33b4 100644
--- a/media/java/android/media/tv/TvInputInfo.java
+++ b/media/java/android/media/tv/TvInputInfo.java
@@ -16,6 +16,7 @@
package android.media.tv;
+import android.annotation.NonNull;
import android.annotation.SystemApi;
import android.content.ComponentName;
import android.content.Context;
@@ -116,12 +117,13 @@ public final class TvInputInfo implements Parcelable {
private final ResolveInfo mService;
private final String mId;
private final String mParentId;
+ private final int mType;
+ private final boolean mIsHardwareInput;
// Attributes from XML meta data.
private String mSetupActivity;
private String mSettingsActivity;
- private int mType = TYPE_TUNER;
private HdmiDeviceInfo mHdmiDeviceInfo;
private String mLabel;
private Uri mIconUri;
@@ -153,7 +155,7 @@ public final class TvInputInfo implements Parcelable {
throws XmlPullParserException, IOException {
return createTvInputInfo(context, service, generateInputIdForComponentName(
new ComponentName(service.serviceInfo.packageName, service.serviceInfo.name)),
- null, TYPE_TUNER, null, null, false);
+ null, TYPE_TUNER, false, null, null, false);
}
/**
@@ -177,7 +179,7 @@ public final class TvInputInfo implements Parcelable {
boolean isConnectedToHdmiSwitch = (hdmiDeviceInfo.getPhysicalAddress() & 0x0FFF) != 0;
TvInputInfo input = createTvInputInfo(context, service, generateInputIdForHdmiDevice(
new ComponentName(service.serviceInfo.packageName, service.serviceInfo.name),
- hdmiDeviceInfo), parentId, TYPE_HDMI, label, iconUri, isConnectedToHdmiSwitch);
+ hdmiDeviceInfo), parentId, TYPE_HDMI, true, label, iconUri, isConnectedToHdmiSwitch);
input.mHdmiDeviceInfo = hdmiDeviceInfo;
return input;
}
@@ -202,12 +204,12 @@ public final class TvInputInfo implements Parcelable {
int inputType = sHardwareTypeToTvInputType.get(hardwareInfo.getType(), TYPE_TUNER);
return createTvInputInfo(context, service, generateInputIdForHardware(
new ComponentName(service.serviceInfo.packageName, service.serviceInfo.name),
- hardwareInfo), null, inputType, label, iconUri, false);
+ hardwareInfo), null, inputType, true, label, iconUri, false);
}
private static TvInputInfo createTvInputInfo(Context context, ResolveInfo service,
- String id, String parentId, int inputType, String label, Uri iconUri,
- boolean isConnectedToHdmiSwitch)
+ String id, String parentId, int inputType, boolean isHardwareInput, String label,
+ Uri iconUri, boolean isConnectedToHdmiSwitch)
throws XmlPullParserException, IOException {
ServiceInfo si = service.serviceInfo;
PackageManager pm = context.getPackageManager();
@@ -233,7 +235,7 @@ public final class TvInputInfo implements Parcelable {
"Meta-data does not start with tv-input-service tag in " + si.name);
}
- TvInputInfo input = new TvInputInfo(service, id, parentId, inputType);
+ TvInputInfo input = new TvInputInfo(service, id, parentId, inputType, isHardwareInput);
TypedArray sa = res.obtainAttributes(attrs,
com.android.internal.R.styleable.TvInputService);
input.mSetupActivity = sa.getString(
@@ -272,12 +274,16 @@ public final class TvInputInfo implements Parcelable {
* @param id ID of this TV input. Should be generated via generateInputId*().
* @param parentId ID of this TV input's parent input. {@code null} if none exists.
* @param type The type of this TV input service.
+ * @param isHardwareInput {@code true} if this TV input represents a hardware device.
+ * {@code false} otherwise.
*/
- private TvInputInfo(ResolveInfo service, String id, String parentId, int type) {
+ private TvInputInfo(ResolveInfo service, String id, String parentId, int type,
+ boolean isHardwareInput) {
mService = service;
mId = id;
mParentId = parentId;
mType = type;
+ mIsHardwareInput = isHardwareInput;
}
/**
@@ -290,18 +296,17 @@ public final class TvInputInfo implements Parcelable {
/**
* Returns the parent input ID.
- * <p>
- * A TV input may have a parent input if the TV input is actually a logical representation of
+ *
+ * <p>A TV input may have a parent input if the TV input is actually a logical representation of
* a device behind the hardware port represented by the parent input.
* For example, a HDMI CEC logical device, connected to a HDMI port, appears as another TV
* input. In this case, the parent input of this logical device is the HDMI port.
- * </p><p>
- * Applications may group inputs by parent input ID to provide an easier access to inputs
+ *
+ * <p>Applications may group inputs by parent input ID to provide an easier access to inputs
* sharing the same physical port. In the example of HDMI CEC, logical HDMI CEC devices behind
* the same HDMI port have the same parent ID, which is the ID representing the port. Thus
* applications can group the hardware HDMI port and the logical HDMI CEC devices behind it
* together using this method.
- * </p>
*
* @return the ID of the parent input, if exists. Returns {@code null} if the parent input is
* not specified.
@@ -381,6 +386,16 @@ public final class TvInputInfo implements Parcelable {
}
/**
+ * Returns {@code true} if this TV input represents a hardware device. (e.g. built-in tuner,
+ * HDMI1) {@code false} otherwise.
+ * @hide
+ */
+ @SystemApi
+ public boolean isHardwareInput() {
+ return mIsHardwareInput;
+ }
+
+ /**
* Returns {@code true}, if a CEC device for this TV input is connected to an HDMI switch, i.e.,
* the device isn't directly connected to a HDMI port.
* @hide
@@ -410,7 +425,7 @@ public final class TvInputInfo implements Parcelable {
* @return a CharSequence containing the TV input's label. If the TV input does not have
* a label, its name is returned.
*/
- public CharSequence loadLabel(Context context) {
+ public CharSequence loadLabel(@NonNull Context context) {
if (TextUtils.isEmpty(mLabel)) {
return mService.loadLabel(context.getPackageManager());
} else {
@@ -438,7 +453,7 @@ public final class TvInputInfo implements Parcelable {
* @return a Drawable containing the TV input's icon. If the TV input does not have an icon,
* application's icon is returned. If it's unavailable too, {@code null} is returned.
*/
- public Drawable loadIcon(Context context) {
+ public Drawable loadIcon(@NonNull Context context) {
if (mIconUri == null) {
return loadServiceIcon(context);
}
@@ -492,13 +507,14 @@ public final class TvInputInfo implements Parcelable {
* @param flags The flags used for parceling.
*/
@Override
- public void writeToParcel(Parcel dest, int flags) {
+ public void writeToParcel(@NonNull Parcel dest, int flags) {
dest.writeString(mId);
dest.writeString(mParentId);
mService.writeToParcel(dest, flags);
dest.writeString(mSetupActivity);
dest.writeString(mSettingsActivity);
dest.writeInt(mType);
+ dest.writeByte(mIsHardwareInput ? (byte) 1 : 0);
dest.writeParcelable(mHdmiDeviceInfo, flags);
dest.writeParcelable(mIconUri, flags);
dest.writeString(mLabel);
@@ -572,6 +588,7 @@ public final class TvInputInfo implements Parcelable {
mSetupActivity = in.readString();
mSettingsActivity = in.readString();
mType = in.readInt();
+ mIsHardwareInput = in.readByte() == 1 ? true : false;
mHdmiDeviceInfo = in.readParcelable(null);
mIconUri = in.readParcelable(null);
mLabel = in.readString();
diff --git a/media/java/android/media/tv/TvInputManager.java b/media/java/android/media/tv/TvInputManager.java
index f55299e..7fc19f1 100644
--- a/media/java/android/media/tv/TvInputManager.java
+++ b/media/java/android/media/tv/TvInputManager.java
@@ -16,14 +16,18 @@
package android.media.tv;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.graphics.Rect;
+import android.media.PlaybackParams;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
+import android.os.ParcelFileDescriptor;
import android.os.RemoteException;
import android.util.ArrayMap;
import android.util.Log;
@@ -37,6 +41,8 @@ import android.view.KeyEvent;
import android.view.Surface;
import android.view.View;
+import com.android.internal.util.Preconditions;
+
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
@@ -50,8 +56,27 @@ import java.util.Map;
public final class TvInputManager {
private static final String TAG = "TvInputManager";
+ static final int DVB_DEVICE_START = 0;
+ static final int DVB_DEVICE_END = 2;
+
+ /**
+ * A demux device of DVB API for controlling the filters of DVB hardware/software.
+ * @hide
+ */
+ public static final int DVB_DEVICE_DEMUX = DVB_DEVICE_START;
+ /**
+ * A DVR device of DVB API for reading transport streams.
+ * @hide
+ */
+ public static final int DVB_DEVICE_DVR = 1;
+ /**
+ * A frontend device of DVB API for controlling the tuner and DVB demodulator hardware.
+ * @hide
+ */
+ public static final int DVB_DEVICE_FRONTEND = DVB_DEVICE_END;
+
static final int VIDEO_UNAVAILABLE_REASON_START = 0;
- static final int VIDEO_UNAVAILABLE_REASON_END = 3;
+ static final int VIDEO_UNAVAILABLE_REASON_END = 4;
/**
* A generic reason. Video is not available due to an unspecified error.
@@ -69,42 +94,66 @@ public final class TvInputManager {
* Video is not available because the TV input stopped the playback temporarily to buffer more
* data.
*/
- public static final int VIDEO_UNAVAILABLE_REASON_BUFFERING = VIDEO_UNAVAILABLE_REASON_END;
+ public static final int VIDEO_UNAVAILABLE_REASON_BUFFERING = 3;
+ /**
+ * Video is not available because the current program is audio-only.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY = VIDEO_UNAVAILABLE_REASON_END;
/**
- * The TV input is in unknown state.
- * <p>
- * State for denoting unknown TV input state. The typical use case is when a requested TV
- * input is removed from the device or it is not registered. Used in
- * {@code ITvInputManager.getTvInputState()}.
- * </p>
- * @hide
+ * Status prior to calling {@link TvInputService.Session#notifyTimeShiftStatusChanged}.
*/
- public static final int INPUT_STATE_UNKNOWN = -1;
+ public static final int TIME_SHIFT_STATUS_UNKNOWN = 0;
+
+ /**
+ * The TV input does not support time shifting.
+ */
+ public static final int TIME_SHIFT_STATUS_UNSUPPORTED = 1;
+
+ /**
+ * Time shifting is currently not available but might work again later.
+ */
+ public static final int TIME_SHIFT_STATUS_UNAVAILABLE = 2;
+
+ /**
+ * Time shifting is currently available. In this status, the application assumes it can
+ * pause/resume playback, seek to a specified time position and set playback rate and audio
+ * mode.
+ */
+ public static final int TIME_SHIFT_STATUS_AVAILABLE = 3;
+
+ public static final long TIME_SHIFT_INVALID_TIME = Long.MIN_VALUE;
/**
* The TV input is connected.
- * <p>
- * State for {@link #getInputState} and {@link
- * TvInputManager.TvInputCallback#onInputStateChanged}.
- * </p>
+ *
+ * <p>This state indicates that a source device is connected to the input port and is in the
+ * normal operation mode. It is mostly relevant to hardware inputs such as HDMI input. This is
+ * the default state for any hardware inputs where their states are unknown. Non-hardware inputs
+ * are considered connected all the time.
+ *
+ * @see #getInputState
+ * @see TvInputManager.TvInputCallback#onInputStateChanged
*/
public static final int INPUT_STATE_CONNECTED = 0;
/**
- * The TV input is connected but in standby mode. It would take a while until it becomes
- * fully ready.
- * <p>
- * State for {@link #getInputState} and {@link
- * TvInputManager.TvInputCallback#onInputStateChanged}.
- * </p>
+ * The TV input is connected but in standby mode.
+ *
+ * <p>This state indicates that a source device is connected to the input port but is in standby
+ * mode. It is mostly relevant to hardware inputs such as HDMI input.
+ *
+ * @see #getInputState
+ * @see TvInputManager.TvInputCallback#onInputStateChanged
*/
public static final int INPUT_STATE_CONNECTED_STANDBY = 1;
/**
* The TV input is disconnected.
- * <p>
- * State for {@link #getInputState} and {@link
- * TvInputManager.TvInputCallback#onInputStateChanged}.
- * </p>
+ *
+ * <p>This state indicates that a source device is disconnected from the input port. It is
+ * mostly relevant to hardware inputs such as HDMI input.
+ *
+ * @see #getInputState
+ * @see TvInputManager.TvInputCallback#onInputStateChanged
*/
public static final int INPUT_STATE_DISCONNECTED = 2;
@@ -124,15 +173,17 @@ public final class TvInputManager {
/**
* Broadcast intent action used to query available content rating systems.
- * <p>
- * The TV input manager service locates available content rating systems by querying broadcast
- * receivers that are registered for this action. An application can offer additional content
- * rating systems to the user by declaring a suitable broadcast receiver in its manifest.
- * </p><p>
- * Here is an example broadcast receiver declaration that an application might include in its
+ *
+ * <p>The TV input manager service locates available content rating systems by querying
+ * broadcast receivers that are registered for this action. An application can offer additional
+ * content rating systems to the user by declaring a suitable broadcast receiver in its
+ * manifest.
+ *
+ * <p>Here is an example broadcast receiver declaration that an application might include in its
* AndroidManifest.xml to advertise custom content rating systems. The meta-data specifies a
* resource that contains a description of each content rating system that is provided by the
* application.
+ *
* <p><pre class="prettyprint">
* {@literal
* <receiver android:name=".TvInputReceiver">
@@ -143,13 +194,13 @@ public final class TvInputManager {
* <meta-data
* android:name="android.media.tv.metadata.CONTENT_RATING_SYSTEMS"
* android:resource="@xml/tv_content_rating_systems" />
- * </receiver>}</pre></p>
- * In the above example, the <code>@xml/tv_content_rating_systems</code> resource refers to an
+ * </receiver>}</pre>
+ *
+ * <p>In the above example, the <code>@xml/tv_content_rating_systems</code> resource refers to an
* XML resource whose root element is <code>&lt;rating-system-definitions&gt;</code> that
* contains zero or more <code>&lt;rating-system-definition&gt;</code> elements. Each <code>
* &lt;rating-system-definition&gt;</code> element specifies the ratings, sub-ratings and rating
* orders of a particular content rating system.
- * </p>
*
* @see TvContentRating
*/
@@ -158,10 +209,9 @@ public final class TvInputManager {
/**
* Content rating systems metadata associated with {@link #ACTION_QUERY_CONTENT_RATING_SYSTEMS}.
- * <p>
- * Specifies the resource ID of an XML resource that describes the content rating systems that
- * are provided by the application.
- * </p>
+ *
+ * <p>Specifies the resource ID of an XML resource that describes the content rating systems
+ * that are provided by the application.
*/
public static final String META_DATA_CONTENT_RATING_SYSTEMS =
"android.media.tv.metadata.CONTENT_RATING_SYSTEMS";
@@ -204,7 +254,7 @@ public final class TvInputManager {
* @param session A {@link TvInputManager.Session} instance created. This can be
* {@code null} if the creation request failed.
*/
- public void onSessionCreated(Session session) {
+ public void onSessionCreated(@Nullable Session session) {
}
/**
@@ -245,7 +295,7 @@ public final class TvInputManager {
* @param trackId The ID of the selected track. When {@code null} the currently selected
* track for a given type should be unselected.
*/
- public void onTrackSelected(Session session, int type, String trackId) {
+ public void onTrackSelected(Session session, int type, @Nullable String trackId) {
}
/**
@@ -271,13 +321,14 @@ public final class TvInputManager {
/**
* This is called when the video is not available, so the TV input stops the playback.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param reason The reason why the TV input stopped the playback:
* <ul>
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_UNKNOWN}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
*/
public void onVideoUnavailable(Session session, int reason) {
@@ -287,7 +338,7 @@ public final class TvInputManager {
* This is called when the current program content turns out to be allowed to watch since
* its content rating is not blocked by parental controls.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
*/
public void onContentAllowed(Session session) {
}
@@ -296,7 +347,7 @@ public final class TvInputManager {
* This is called when the current program content turns out to be not allowed to watch
* since its content rating is blocked by parental controls.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param rating The content ration of the blocked program.
*/
public void onContentBlocked(Session session, TvContentRating rating) {
@@ -306,7 +357,7 @@ public final class TvInputManager {
* This is called when {@link TvInputService.Session#layoutSurface} is called to change the
* layout of surface.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param left Left position.
* @param top Top position.
* @param right Right position.
@@ -328,6 +379,45 @@ public final class TvInputManager {
@SystemApi
public void onSessionEvent(Session session, String eventType, Bundle eventArgs) {
}
+
+ /**
+ * This is called when the time shift status is changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+     * @param status The current time shift status. Should be one of the following.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void onTimeShiftStatusChanged(Session session, int status) {
+ }
+
+ /**
+ * This is called when the start playback position is changed.
+ *
+ * <p>The start playback position of the time shifted program should be adjusted when the TV
+ * input cannot retain the whole recorded program due to some reason (e.g. limitation on
+ * storage space). This is necessary to prevent the application from allowing the user to
+ * seek to a time position that is not reachable.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param timeMs The start playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftStartPositionChanged(Session session, long timeMs) {
+ }
+
+ /**
+ * This is called when the current playback position is changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param timeMs The current playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftCurrentPositionChanged(Session session, long timeMs) {
+ }
}
private static final class SessionCallbackRecord {
@@ -450,10 +540,37 @@ public final class TvInputManager {
}
});
}
+
+ void postTimeShiftStatusChanged(final int status) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftStatusChanged(mSession, status);
+ }
+ });
+ }
+
+ void postTimeShiftStartPositionChanged(final long timeMs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftStartPositionChanged(mSession, timeMs);
+ }
+ });
+ }
+
+ void postTimeShiftCurrentPositionChanged(final long timeMs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftCurrentPositionChanged(mSession, timeMs);
+ }
+ });
+ }
}
/**
- * Callback used to monitor status of the TV input.
+ * Callback used to monitor status of the TV inputs.
*/
public abstract static class TvInputCallback {
/**
@@ -471,7 +588,10 @@ public final class TvInputManager {
}
/**
- * This is called when a TV input is added.
+ * This is called when a TV input is added to the system.
+ *
+ * <p>Normally it happens when the user installs a new TV input package that implements
+ * {@link TvInputService} interface.
*
* @param inputId The id of the TV input.
*/
@@ -479,7 +599,10 @@ public final class TvInputManager {
}
/**
- * This is called when a TV input is removed.
+ * This is called when a TV input is removed from the system.
+ *
+ * <p>Normally it happens when the user uninstalls the previously installed TV input
+ * package.
*
* @param inputId The id of the TV input.
*/
@@ -487,9 +610,10 @@ public final class TvInputManager {
}
/**
- * This is called when a TV input is updated. The update of TV input happens when it is
- * reinstalled or the media on which the newer version of TV input exists is
- * available/unavailable.
+ * This is called when a TV input is updated on the system.
+ *
+ * <p>Normally it happens when a previously installed TV input package is re-installed or
+ * the media on which a newer version of the package exists becomes available/unavailable.
*
* @param inputId The id of the TV input.
* @hide
@@ -718,6 +842,42 @@ public final class TvInputManager {
record.postSessionEvent(eventType, eventArgs);
}
}
+
+ @Override
+ public void onTimeShiftStatusChanged(int status, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftStatusChanged(status);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStartPositionChanged(long timeMs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftStartPositionChanged(timeMs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(long timeMs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftCurrentPositionChanged(timeMs);
+ }
+ }
};
mManagerCallback = new ITvInputManagerCallback.Stub() {
@Override
@@ -766,15 +926,12 @@ public final class TvInputManager {
synchronized (mLock) {
for (TvInputInfo info : infos) {
String inputId = info.getId();
- int state = mService.getTvInputState(inputId, mUserId);
- if (state != INPUT_STATE_UNKNOWN) {
- mStateMap.put(inputId, state);
- }
+ mStateMap.put(inputId, mService.getTvInputState(inputId, mUserId));
}
}
}
} catch (RemoteException e) {
- Log.e(TAG, "TvInputManager initialization failed: " + e);
+ Log.e(TAG, "TvInputManager initialization failed", e);
}
}
@@ -797,10 +954,9 @@ public final class TvInputManager {
* @param inputId The ID of the TV input.
* @return the {@link TvInputInfo} for a given TV input. {@code null} if not found.
*/
- public TvInputInfo getTvInputInfo(String inputId) {
- if (inputId == null) {
- throw new IllegalArgumentException("inputId cannot be null");
- }
+ @Nullable
+ public TvInputInfo getTvInputInfo(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
try {
return mService.getTvInputInfo(inputId, mUserId);
} catch (RemoteException e) {
@@ -809,7 +965,9 @@ public final class TvInputManager {
}
/**
- * Returns the state of a given TV input. It returns one of the following:
+ * Returns the state of a given TV input.
+ *
+ * <p>The state is one of the following:
* <ul>
* <li>{@link #INPUT_STATE_CONNECTED}
* <li>{@link #INPUT_STATE_CONNECTED_STANDBY}
@@ -817,17 +975,15 @@ public final class TvInputManager {
* </ul>
*
* @param inputId The id of the TV input.
- * @throws IllegalArgumentException if the argument is {@code null} or if there is no
- * {@link TvInputInfo} corresponding to {@code inputId}.
+ * @throws IllegalArgumentException if the argument is {@code null}.
*/
- public int getInputState(String inputId) {
- if (inputId == null) {
- throw new IllegalArgumentException("inputId cannot be null");
- }
+ public int getInputState(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
synchronized (mLock) {
Integer state = mStateMap.get(inputId);
if (state == null) {
- throw new IllegalArgumentException("Unrecognized input ID: " + inputId);
+ Log.w(TAG, "Unrecognized input ID: " + inputId);
+ return INPUT_STATE_DISCONNECTED;
}
return state.intValue();
}
@@ -838,15 +994,10 @@ public final class TvInputManager {
*
* @param callback A callback used to monitor status of the TV inputs.
* @param handler A {@link Handler} that the status change will be delivered to.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
*/
- public void registerCallback(TvInputCallback callback, Handler handler) {
- if (callback == null) {
- throw new IllegalArgumentException("callback cannot be null");
- }
- if (handler == null) {
- throw new IllegalArgumentException("handler cannot be null");
- }
+ public void registerCallback(@NonNull TvInputCallback callback, @NonNull Handler handler) {
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
synchronized (mLock) {
mCallbackRecords.add(new TvInputCallbackRecord(callback, handler));
}
@@ -856,12 +1007,9 @@ public final class TvInputManager {
* Unregisters the existing {@link TvInputCallback}.
*
* @param callback The existing callback to remove.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
*/
- public void unregisterCallback(final TvInputCallback callback) {
- if (callback == null) {
- throw new IllegalArgumentException("callback cannot be null");
- }
+ public void unregisterCallback(@NonNull final TvInputCallback callback) {
+ Preconditions.checkNotNull(callback);
synchronized (mLock) {
for (Iterator<TvInputCallbackRecord> it = mCallbackRecords.iterator();
it.hasNext(); ) {
@@ -907,13 +1055,11 @@ public final class TvInputManager {
/**
* Checks whether a given TV content rating is blocked by the user.
*
- * @param rating The TV content rating to check.
+ * @param rating The TV content rating to check. Can be {@link TvContentRating#UNRATED}.
* @return {@code true} if the given TV content rating is blocked, {@code false} otherwise.
*/
- public boolean isRatingBlocked(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public boolean isRatingBlocked(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
try {
return mService.isRatingBlocked(rating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -949,10 +1095,8 @@ public final class TvInputManager {
* @hide
*/
@SystemApi
- public void addBlockedRating(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public void addBlockedRating(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
try {
mService.addBlockedRating(rating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -969,10 +1113,8 @@ public final class TvInputManager {
* @hide
*/
@SystemApi
- public void removeBlockedRating(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public void removeBlockedRating(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
try {
mService.removeBlockedRating(rating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -995,29 +1137,21 @@ public final class TvInputManager {
/**
* Creates a {@link Session} for a given TV input.
- * <p>
- * The number of sessions that can be created at the same time is limited by the capability of
- * the given TV input.
- * </p>
+ *
+ * <p>The number of sessions that can be created at the same time is limited by the capability
+ * of the given TV input.
*
* @param inputId The id of the TV input.
* @param callback A callback used to receive the created session.
* @param handler A {@link Handler} that the session creation will be delivered to.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
* @hide
*/
@SystemApi
- public void createSession(String inputId, final SessionCallback callback,
- Handler handler) {
- if (inputId == null) {
- throw new IllegalArgumentException("id cannot be null");
- }
- if (callback == null) {
- throw new IllegalArgumentException("callback cannot be null");
- }
- if (handler == null) {
- throw new IllegalArgumentException("handler cannot be null");
- }
+ public void createSession(@NonNull String inputId, @NonNull final SessionCallback callback,
+ @NonNull Handler handler) {
+ Preconditions.checkNotNull(inputId);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
SessionCallbackRecord record = new SessionCallbackRecord(callback, handler);
synchronized (mSessionCallbackRecordMap) {
int seq = mNextSeq++;
@@ -1144,6 +1278,43 @@ public final class TvInputManager {
}
/**
+ * Returns the list of currently available DVB devices on the system.
+ *
+ * @return the list of {@link DvbDeviceInfo} objects representing available DVB devices.
+ * @hide
+ */
+ public List<DvbDeviceInfo> getDvbDeviceList() {
+ try {
+ return mService.getDvbDeviceList();
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+     * Returns a {@link ParcelFileDescriptor} of a specified DVB device for a given
+     * {@link DvbDeviceInfo}.
+ *
+ * @param info A {@link DvbDeviceInfo} to open a DVB device.
+ * @param device A DVB device. The DVB device can be {@link #DVB_DEVICE_DEMUX},
+ * {@link #DVB_DEVICE_DVR} or {@link #DVB_DEVICE_FRONTEND}.
+ * @return a {@link ParcelFileDescriptor} of a specified DVB device for a given
+ * {@link DvbDeviceInfo}, or {@code null} if the given {@link DvbDeviceInfo} was invalid
+ * or the specified DVB device was busy with a previous request.
+ * @hide
+ */
+ public ParcelFileDescriptor openDvbDevice(DvbDeviceInfo info, int device) {
+ try {
+ if (DVB_DEVICE_START > device || DVB_DEVICE_END < device) {
+ throw new IllegalArgumentException("Invalid DVB device: " + device);
+ }
+ return mService.openDvbDevice(info, device);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
* The Session provides the per-session functionality of TV inputs.
* @hide
*/
@@ -1171,22 +1342,22 @@ public final class TvInputManager {
private TvInputEventSender mSender;
private InputChannel mChannel;
- private final Object mTrackLock = new Object();
- // @GuardedBy("mTrackLock")
+ private final Object mMetadataLock = new Object();
+ // @GuardedBy("mMetadataLock")
private final List<TvTrackInfo> mAudioTracks = new ArrayList<TvTrackInfo>();
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private final List<TvTrackInfo> mVideoTracks = new ArrayList<TvTrackInfo>();
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private final List<TvTrackInfo> mSubtitleTracks = new ArrayList<TvTrackInfo>();
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private String mSelectedAudioTrackId;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private String mSelectedVideoTrackId;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private String mSelectedSubtitleTrackId;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private int mVideoWidth;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private int mVideoHeight;
private Session(IBinder token, InputChannel channel, ITvInputManager service, int userId,
@@ -1253,12 +1424,12 @@ public final class TvInputManager {
}
/**
- * Notifies of any structural changes (format or size) of the {@link Surface}
- * passed by {@link #setSurface}.
+ * Notifies of any structural changes (format or size) of the surface passed in
+ * {@link #setSurface}.
*
- * @param format The new PixelFormat of the {@link Surface}.
- * @param width The new width of the {@link Surface}.
- * @param height The new height of the {@link Surface}.
+ * @param format The new PixelFormat of the surface.
+ * @param width The new width of the surface.
+ * @param height The new height of the surface.
* @hide
*/
@SystemApi
@@ -1299,7 +1470,6 @@ public final class TvInputManager {
* Tunes to a given channel.
*
* @param channelUri The URI of a channel.
- * @throws IllegalArgumentException if the argument is {@code null}.
*/
public void tune(Uri channelUri) {
tune(channelUri, null);
@@ -1310,19 +1480,16 @@ public final class TvInputManager {
*
* @param channelUri The URI of a channel.
* @param params A set of extra parameters which might be handled with this tune event.
- * @throws IllegalArgumentException if {@code channelUri} is {@code null}.
* @hide
*/
@SystemApi
- public void tune(Uri channelUri, Bundle params) {
- if (channelUri == null) {
- throw new IllegalArgumentException("channelUri cannot be null");
- }
+ public void tune(@NonNull Uri channelUri, Bundle params) {
+ Preconditions.checkNotNull(channelUri);
if (mToken == null) {
Log.w(TAG, "The session has been already released");
return;
}
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
mAudioTracks.clear();
mVideoTracks.clear();
mSubtitleTracks.clear();
@@ -1366,8 +1533,8 @@ public final class TvInputManager {
* track of the given type will be unselected.
* @see #getTracks
*/
- public void selectTrack(int type, String trackId) {
- synchronized (mTrackLock) {
+ public void selectTrack(int type, @Nullable String trackId) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO) {
if (trackId != null && !containsTrack(mAudioTracks, trackId)) {
Log.w(TAG, "Invalid audio trackId: " + trackId);
@@ -1415,8 +1582,9 @@ public final class TvInputManager {
* {@link TvTrackInfo#TYPE_VIDEO} or {@link TvTrackInfo#TYPE_SUBTITLE}.
* @return the list of tracks for the given type.
*/
+ @Nullable
public List<TvTrackInfo> getTracks(int type) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO) {
if (mAudioTracks == null) {
return null;
@@ -1444,8 +1612,9 @@ public final class TvInputManager {
* @return the ID of the selected track.
* @see #selectTrack
*/
+ @Nullable
public String getSelectedTrack(int type) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO) {
return mSelectedAudioTrackId;
} else if (type == TvTrackInfo.TYPE_VIDEO) {
@@ -1462,7 +1631,7 @@ public final class TvInputManager {
* there is an update.
*/
boolean updateTracks(List<TvTrackInfo> tracks) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
mAudioTracks.clear();
mVideoTracks.clear();
mSubtitleTracks.clear();
@@ -1485,7 +1654,7 @@ public final class TvInputManager {
* Returns true if there is an update.
*/
boolean updateTrackSelection(int type, String trackId) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO && trackId != mSelectedAudioTrackId) {
mSelectedAudioTrackId = trackId;
return true;
@@ -1509,7 +1678,7 @@ public final class TvInputManager {
* track.
*/
TvTrackInfo getVideoTrackToNotify() {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (!mVideoTracks.isEmpty() && mSelectedVideoTrackId != null) {
for (TvTrackInfo track : mVideoTracks) {
if (track.getId().equals(mSelectedVideoTrackId)) {
@@ -1528,6 +1697,91 @@ public final class TvInputManager {
}
/**
+ * Pauses the playback. Call {@link #timeShiftResume()} to restart the playback.
+ */
+ void timeShiftPause() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftPause(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Resumes the playback. No-op if it is already playing the channel.
+ */
+ void timeShiftResume() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftResume(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Seeks to a specified time position.
+ *
+         * <p>Normally, the position is given within the range between the start and the
+         * current time, inclusive.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ * @see TvView.TimeShiftPositionCallback#onTimeShiftStartPositionChanged
+ */
+ void timeShiftSeekTo(long timeMs) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftSeekTo(mToken, timeMs, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Sets playback rate using {@link android.media.PlaybackParams}.
+ *
+ * @param params The playback params.
+ */
+ void timeShiftSetPlaybackParams(PlaybackParams params) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftSetPlaybackParams(mToken, params, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+         * Enables or disables position tracking.
+ *
+ * @param enable {@code true} to enable tracking, {@code false} otherwise.
+ */
+ void timeShiftEnablePositionTracking(boolean enable) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftEnablePositionTracking(mToken, enable, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
* Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
* TvInputService.Session.appPrivateCommand()} on the current TvView.
*
@@ -1559,16 +1813,11 @@ public final class TvInputManager {
*
* @param view A view playing TV.
* @param frame A position of the overlay view.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
* @throws IllegalStateException if {@code view} is not attached to a window.
*/
- void createOverlayView(View view, Rect frame) {
- if (view == null) {
- throw new IllegalArgumentException("view cannot be null");
- }
- if (frame == null) {
- throw new IllegalArgumentException("frame cannot be null");
- }
+ void createOverlayView(@NonNull View view, @NonNull Rect frame) {
+ Preconditions.checkNotNull(view);
+ Preconditions.checkNotNull(frame);
if (view.getWindowToken() == null) {
throw new IllegalStateException("view must be attached to a window");
}
@@ -1587,12 +1836,9 @@ public final class TvInputManager {
* Relayouts the current overlay view.
*
* @param frame A new position of the overlay view.
- * @throws IllegalArgumentException if the arguments is {@code null}.
*/
- void relayoutOverlayView(Rect frame) {
- if (frame == null) {
- throw new IllegalArgumentException("frame cannot be null");
- }
+ void relayoutOverlayView(@NonNull Rect frame) {
+ Preconditions.checkNotNull(frame);
if (mToken == null) {
Log.w(TAG, "The session has been already released");
return;
@@ -1622,16 +1868,14 @@ public final class TvInputManager {
/**
* Requests to unblock content blocked by parental controls.
*/
- void requestUnblockContent(TvContentRating unblockedRating) {
+ void unblockContent(@NonNull TvContentRating unblockedRating) {
+ Preconditions.checkNotNull(unblockedRating);
if (mToken == null) {
Log.w(TAG, "The session has been already released");
return;
}
- if (unblockedRating == null) {
- throw new IllegalArgumentException("unblockedRating cannot be null");
- }
try {
- mService.requestUnblockContent(mToken, unblockedRating.flattenToString(), mUserId);
+ mService.unblockContent(mToken, unblockedRating.flattenToString(), mUserId);
} catch (RemoteException e) {
throw new RuntimeException(e);
}
@@ -1640,25 +1884,22 @@ public final class TvInputManager {
/**
* Dispatches an input event to this session.
*
- * @param event An {@link InputEvent} to dispatch.
+ * @param event An {@link InputEvent} to dispatch. Cannot be {@code null}.
* @param token A token used to identify the input event later in the callback.
- * @param callback A callback used to receive the dispatch result.
- * @param handler A {@link Handler} that the dispatch result will be delivered to.
+ * @param callback A callback used to receive the dispatch result. Cannot be {@code null}.
+ * @param handler A {@link Handler} that the dispatch result will be delivered to. Cannot be
+ * {@code null}.
* @return Returns {@link #DISPATCH_HANDLED} if the event was handled. Returns
* {@link #DISPATCH_NOT_HANDLED} if the event was not handled. Returns
* {@link #DISPATCH_IN_PROGRESS} if the event is in progress and the callback will
* be invoked later.
- * @throws IllegalArgumentException if any of the necessary arguments is {@code null}.
* @hide
*/
- public int dispatchInputEvent(InputEvent event, Object token,
- FinishedInputEventCallback callback, Handler handler) {
- if (event == null) {
- throw new IllegalArgumentException("event cannot be null");
- }
- if (callback != null && handler == null) {
- throw new IllegalArgumentException("handler cannot be null");
- }
+ public int dispatchInputEvent(@NonNull InputEvent event, Object token,
+ @NonNull FinishedInputEventCallback callback, @NonNull Handler handler) {
+ Preconditions.checkNotNull(event);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
synchronized (mHandler) {
if (mChannel == null) {
return DISPATCH_NOT_HANDLED;
@@ -1886,7 +2127,7 @@ public final class TvInputManager {
/**
* The Hardware provides the per-hardware functionality of TV hardware.
*
- * TV hardware is physical hardware attached to the Android device; for example, HDMI ports,
+ * <p>TV hardware is physical hardware attached to the Android device; for example, HDMI ports,
* Component/Composite ports, etc. Specifically, logical devices such as HDMI CEC logical
* devices don't fall into this category.
*
diff --git a/media/java/android/media/tv/TvInputService.java b/media/java/android/media/tv/TvInputService.java
index b7e766b..4b84090 100644
--- a/media/java/android/media/tv/TvInputService.java
+++ b/media/java/android/media/tv/TvInputService.java
@@ -16,6 +16,8 @@
package android.media.tv;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.annotation.SuppressLint;
import android.annotation.SystemApi;
import android.app.Service;
@@ -24,6 +26,8 @@ import android.content.Intent;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.hardware.hdmi.HdmiDeviceInfo;
+import android.media.PlaybackParams;
+import android.media.tv.TvInputService.HardwareSession;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
@@ -48,8 +52,8 @@ import android.view.WindowManager;
import android.view.accessibility.CaptioningManager;
import android.widget.FrameLayout;
-import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.os.SomeArgs;
+import com.android.internal.util.Preconditions;
import java.util.ArrayList;
import java.util.HashSet;
@@ -59,15 +63,14 @@ import java.util.Set;
/**
* The TvInputService class represents a TV input or source such as HDMI or built-in tuner which
* provides pass-through video or broadcast TV programs.
- * <p>
- * Applications will not normally use this service themselves, instead relying on the standard
+ *
+ * <p>Applications will not normally use this service themselves, instead relying on the standard
* interaction provided by {@link TvView}. Those implementing TV input services should normally do
* so by deriving from this class and providing their own session implementation based on
* {@link TvInputService.Session}. All TV input services must require that clients hold the
* {@link android.Manifest.permission#BIND_TV_INPUT} in order to interact with the service; if this
* permission is not specified in the manifest, the system will refuse to bind to that TV input
* service.
- * </p>
*/
public abstract class TvInputService extends Service {
private static final boolean DEBUG = false;
@@ -160,13 +163,14 @@ public abstract class TvInputService extends Service {
/**
* Returns a concrete implementation of {@link Session}.
- * <p>
- * May return {@code null} if this TV input service fails to create a session for some reason.
- * If TV input represents an external device connected to a hardware TV input,
+ *
+ * <p>May return {@code null} if this TV input service fails to create a session for some
+ * reason. If TV input represents an external device connected to a hardware TV input,
* {@link HardwareSession} should be returned.
- * </p>
+ *
* @param inputId The ID of the TV input associated with the session.
*/
+ @Nullable
public abstract Session onCreateSession(String inputId);
/**
@@ -177,6 +181,7 @@ public abstract class TvInputService extends Service {
* @param hardwareInfo {@link TvInputHardwareInfo} object just added.
* @hide
*/
+ @Nullable
@SystemApi
public TvInputInfo onHardwareAdded(TvInputHardwareInfo hardwareInfo) {
return null;
@@ -190,6 +195,7 @@ public abstract class TvInputService extends Service {
* @param hardwareInfo {@link TvInputHardwareInfo} object just removed.
* @hide
*/
+ @Nullable
@SystemApi
public String onHardwareRemoved(TvInputHardwareInfo hardwareInfo) {
return null;
@@ -203,6 +209,7 @@ public abstract class TvInputService extends Service {
* @param deviceInfo {@link HdmiDeviceInfo} object just added.
* @hide
*/
+ @Nullable
@SystemApi
public TvInputInfo onHdmiDeviceAdded(HdmiDeviceInfo deviceInfo) {
return null;
@@ -216,6 +223,7 @@ public abstract class TvInputService extends Service {
* @param deviceInfo {@link HdmiDeviceInfo} object just removed.
* @hide
*/
+ @Nullable
@SystemApi
public String onHdmiDeviceRemoved(HdmiDeviceInfo deviceInfo) {
return null;
@@ -236,25 +244,31 @@ public abstract class TvInputService extends Service {
* Base class for derived classes to implement to provide a TV input session.
*/
public abstract static class Session implements KeyEvent.Callback {
- private static final int DETACH_OVERLAY_VIEW_TIMEOUT = 5000;
+ private static final int DETACH_OVERLAY_VIEW_TIMEOUT_MS = 5000;
+ private static final int POSITION_UPDATE_INTERVAL_MS = 1000;
+
private final KeyEvent.DispatcherState mDispatcherState = new KeyEvent.DispatcherState();
private final WindowManager mWindowManager;
final Handler mHandler;
private WindowManager.LayoutParams mWindowParams;
private Surface mSurface;
- private Context mContext;
+ private final Context mContext;
private FrameLayout mOverlayViewContainer;
private View mOverlayView;
private OverlayViewCleanUpTask mOverlayViewCleanUpTask;
private boolean mOverlayViewEnabled;
private IBinder mWindowToken;
private Rect mOverlayFrame;
+ private long mStartPositionMs;
+ private long mCurrentPositionMs;
+ private final TimeShiftPositionTrackingRunnable
+ mTimeShiftPositionTrackingRunnable = new TimeShiftPositionTrackingRunnable();
- private Object mLock = new Object();
+ private final Object mLock = new Object();
// @GuardedBy("mLock")
private ITvInputSessionCallback mSessionCallback;
// @GuardedBy("mLock")
- private List<Runnable> mPendingActions = new ArrayList<>();
+ private final List<Runnable> mPendingActions = new ArrayList<>();
/**
* Creates a new Session.
@@ -265,11 +279,19 @@ public abstract class TvInputService extends Service {
mContext = context;
mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
mHandler = new Handler(context.getMainLooper());
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
}
/**
- * Enables or disables the overlay view. By default, the overlay view is disabled. Must be
- * called explicitly after the session is created to enable the overlay view.
+ * Enables or disables the overlay view.
+ *
+ * <p>By default, the overlay view is disabled. Must be called explicitly after the
+ * session is created to enable the overlay view.
+ *
+ * <p>The TV input service can disable its overlay view when the size of the overlay view is
+ * insufficient to display the whole information, such as when used in Picture-in-picture.
+ * Override {@link #onOverlayViewSizeChanged} to get the size of the overlay view, which
+ * then can be used to determine whether to enable/disable the overlay view.
*
* @param enable {@code true} if you want to enable the overlay view. {@code false}
* otherwise.
@@ -301,10 +323,8 @@ public abstract class TvInputService extends Service {
* @hide
*/
@SystemApi
- public void notifySessionEvent(final String eventType, final Bundle eventArgs) {
- if (eventType == null) {
- throw new IllegalArgumentException("eventType should not be null.");
- }
+ public void notifySessionEvent(@NonNull final String eventType, final Bundle eventArgs) {
+ Preconditions.checkNotNull(eventType);
executeOrPostRunnable(new Runnable() {
@Override
public void run() {
@@ -314,7 +334,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onSessionEvent(eventType, eventArgs);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in sending event (event=" + eventType + ")");
+ Log.w(TAG, "error in sending event (event=" + eventType + ")", e);
}
}
});
@@ -335,7 +355,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onChannelRetuned(channelUri);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyChannelRetuned");
+ Log.w(TAG, "error in notifyChannelRetuned", e);
}
}
});
@@ -374,7 +394,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onTracksChanged(tracks);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyTracksChanged");
+ Log.w(TAG, "error in notifyTracksChanged", e);
}
}
});
@@ -404,7 +424,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onTrackSelected(type, trackId);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyTrackSelected");
+ Log.w(TAG, "error in notifyTrackSelected", e);
}
}
});
@@ -427,7 +447,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onVideoAvailable();
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyVideoAvailable");
+ Log.w(TAG, "error in notifyVideoAvailable", e);
}
}
});
@@ -444,6 +464,7 @@ public abstract class TvInputService extends Service {
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
* @see #notifyVideoAvailable
*/
@@ -461,7 +482,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onVideoUnavailable(reason);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyVideoUnavailable");
+ Log.w(TAG, "error in notifyVideoUnavailable", e);
}
}
});
@@ -469,8 +490,8 @@ public abstract class TvInputService extends Service {
/**
* Informs the application that the user is allowed to watch the current program content.
- * <p>
- * Each TV input service is required to query the system whether the user is allowed to
+ *
+ * <p>Each TV input service is required to query the system whether the user is allowed to
* watch the current program before showing it to the user if the parental controls is
* enabled (i.e. {@link TvInputManager#isParentalControlsEnabled
* TvInputManager.isParentalControlsEnabled()} returns {@code true}). Whether the TV input
@@ -481,13 +502,12 @@ public abstract class TvInputService extends Service {
* result. If the rating in question turns out to be allowed by the user, the TV input
* service must call this method to notify the application that is permitted to show the
* content.
- * </p><p>
- * Each TV input service also needs to continuously listen to any changes made to the
+ *
+ * <p>Each TV input service also needs to continuously listen to any changes made to the
* parental controls settings by registering a broadcast receiver to receive
* {@link TvInputManager#ACTION_BLOCKED_RATINGS_CHANGED} and
* {@link TvInputManager#ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED} and immediately
* reevaluate the current program with the new parental controls settings.
- * </p>
*
* @see #notifyContentBlocked
* @see TvInputManager
@@ -502,7 +522,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onContentAllowed();
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyContentAllowed");
+ Log.w(TAG, "error in notifyContentAllowed", e);
}
}
});
@@ -510,31 +530,33 @@ public abstract class TvInputService extends Service {
/**
* Informs the application that the current program content is blocked by parent controls.
- * <p>
- * Each TV input service is required to query the system whether the user is allowed to
+ *
+ * <p>Each TV input service is required to query the system whether the user is allowed to
* watch the current program before showing it to the user if the parental controls is
* enabled (i.e. {@link TvInputManager#isParentalControlsEnabled
* TvInputManager.isParentalControlsEnabled()} returns {@code true}). Whether the TV input
* service should block the content or not is determined by invoking
* {@link TvInputManager#isRatingBlocked TvInputManager.isRatingBlocked(TvContentRating)}
- * with the content rating for the current program. Then the {@link TvInputManager} makes a
- * judgment based on the user blocked ratings stored in the secure settings and returns the
- * result. If the rating in question turns out to be blocked, the TV input service must
- * immediately block the content and call this method with the content rating of the current
- * program to prompt the PIN verification screen.
- * </p><p>
- * Each TV input service also needs to continuously listen to any changes made to the
+ * with the content rating for the current program or {@link TvContentRating#UNRATED} in
+ * case the rating information is missing. Then the {@link TvInputManager} makes a judgment
+ * based on the user blocked ratings stored in the secure settings and returns the result.
+ * If the rating in question turns out to be blocked, the TV input service must immediately
+ * block the content and call this method with the content rating of the current program to
+ * prompt the PIN verification screen.
+ *
+ * <p>Each TV input service also needs to continuously listen to any changes made to the
* parental controls settings by registering a broadcast receiver to receive
* {@link TvInputManager#ACTION_BLOCKED_RATINGS_CHANGED} and
* {@link TvInputManager#ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED} and immediately
* reevaluate the current program with the new parental controls settings.
- * </p>
*
- * @param rating The content rating for the current TV program.
+ * @param rating The content rating for the current TV program. Can be
+ * {@link TvContentRating#UNRATED}.
* @see #notifyContentAllowed
* @see TvInputManager
*/
- public void notifyContentBlocked(final TvContentRating rating) {
+ public void notifyContentBlocked(@NonNull final TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
executeOrPostRunnable(new Runnable() {
@Override
public void run() {
@@ -544,24 +566,95 @@ public abstract class TvInputService extends Service {
mSessionCallback.onContentBlocked(rating.flattenToString());
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyContentBlocked");
+ Log.w(TAG, "error in notifyContentBlocked", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Informs the application that the time shift status is changed.
+ *
+ * <p>Prior to calling this method, the application assumes the status
+ * {@link TvInputManager#TIME_SHIFT_STATUS_UNKNOWN}. Right after the session is created, it
+ * is important to invoke the method with the status
+ * {@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE} if the implementation does support
+         * time shifting, or {@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED} otherwise. Failure
+         * to notify of the current status change immediately might result in undesirable
+         * behavior in the application, such as hiding the play controls.
+ *
+ * <p>If the status {@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE} is reported, the
+ * application assumes it can pause/resume playback, seek to a specified time position and
+ * set playback rate and audio mode. The implementation should override
+ * {@link #onTimeShiftPause}, {@link #onTimeShiftResume}, {@link #onTimeShiftSeekTo},
+ * {@link #onTimeShiftGetStartPosition}, {@link #onTimeShiftGetCurrentPosition} and
+ * {@link #onTimeShiftSetPlaybackParams}.
+ *
+         * @param status The current time shift status. Should be one of the following:
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void notifyTimeShiftStatusChanged(final int status) {
+ executeOrPostRunnable(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftStatusChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftStatusChanged(status);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftStatusChanged", e);
+ }
+ }
+ });
+ }
+
+ private void notifyTimeShiftStartPositionChanged(final long timeMs) {
+ executeOrPostRunnable(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftStartPositionChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftStartPositionChanged(timeMs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftStartPositionChanged", e);
+ }
+ }
+ });
+ }
+
+ private void notifyTimeShiftCurrentPositionChanged(final long timeMs) {
+ executeOrPostRunnable(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftCurrentPositionChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftCurrentPositionChanged(timeMs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftCurrentPositionChanged", e);
}
}
});
}
/**
- * Assigns a position of the {@link Surface} passed by {@link #onSetSurface}. The position
- * is relative to an overlay view.
+ * Assigns a size and position to the surface passed in {@link #onSetSurface}. The position
+ * is relative to the overlay view that sits on top of this surface.
*
* @param left Left position in pixels, relative to the overlay view.
* @param top Top position in pixels, relative to the overlay view.
* @param right Right position in pixels, relative to the overlay view.
* @param bottom Bottom position in pixels, relative to the overlay view.
* @see #onOverlayViewSizeChanged
- * @hide
*/
- @SystemApi
public void layoutSurface(final int left, final int top, final int right,
final int bottom) {
if (left > right || top > bottom) {
@@ -577,7 +670,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onLayoutSurface(left, top, right, bottom);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in layoutSurface");
+ Log.w(TAG, "error in layoutSurface", e);
}
}
});
@@ -591,21 +684,20 @@ public abstract class TvInputService extends Service {
/**
* Sets the current session as the main session. The main session is a session whose
* corresponding TV input determines the HDMI-CEC active source device.
- * <p>
- * TV input service that manages HDMI-CEC logical device should implement {@link
+ *
+ * <p>TV input service that manages HDMI-CEC logical device should implement {@link
* #onSetMain} to (1) select the corresponding HDMI logical device as the source device
* when {@code isMain} is {@code true}, and to (2) select the internal device (= TV itself)
* as the source device when {@code isMain} is {@code false} and the session is still main.
* Also, if a surface is passed to a non-main session and active source is changed to
* initiate the surface, the active source should be returned to the main session.
- * </p><p>
- * {@link TvView} guarantees that, when tuning involves a session transition, {@code
+ *
+ * <p>{@link TvView} guarantees that, when tuning involves a session transition, {@code
* onSetMain(true)} for new session is called first, {@code onSetMain(false)} for old
* session is called afterwards. This allows {@code onSetMain(false)} to be no-op when TV
* input service knows that the next main session corresponds to another HDMI logical
* device. Practically, this implies that one TV input service should handle all HDMI port
* and HDMI-CEC logical devices for smooth active source transition.
- * </p>
*
* @param isMain If true, session should become main.
* @see TvView#setMain
@@ -616,55 +708,62 @@ public abstract class TvInputService extends Service {
}
/**
- * Sets the {@link Surface} for the current input session on which the TV input renders
- * video.
+ * Called when the application sets the surface.
+ *
+ * <p>The TV input service should render video onto the given surface. When called with
+ * {@code null}, the input service should immediately release any references to the
+ * currently set surface and stop using it.
*
- * @param surface {@link Surface} an application passes to this TV input session.
- * @return {@code true} if the surface was set, {@code false} otherwise.
+ * @param surface The surface to be used for video rendering. Can be {@code null}.
+ * @return {@code true} if the surface was set successfully, {@code false} otherwise.
*/
- public abstract boolean onSetSurface(Surface surface);
+ public abstract boolean onSetSurface(@Nullable Surface surface);
/**
- * Called after any structural changes (format or size) have been made to the
- * {@link Surface} passed by {@link #onSetSurface}. This method is always called
- * at least once, after {@link #onSetSurface} with non-null {@link Surface} is called.
+ * Called after any structural changes (format or size) have been made to the surface passed
+ * in {@link #onSetSurface}. This method is always called at least once, after
+         * {@link #onSetSurface} is called with a non-null surface.
*
- * @param format The new PixelFormat of the {@link Surface}.
- * @param width The new width of the {@link Surface}.
- * @param height The new height of the {@link Surface}.
+ * @param format The new PixelFormat of the surface.
+ * @param width The new width of the surface.
+ * @param height The new height of the surface.
*/
public void onSurfaceChanged(int format, int width, int height) {
}
/**
- * Called when a size of an overlay view is changed by an application. Even when the overlay
- * view is disabled by {@link #setOverlayViewEnabled}, this is called. The size is same as
- * the size of {@link Surface} in general. Once {@link #layoutSurface} is called, the sizes
- * of {@link Surface} and the overlay view can be different.
+ * Called when the size of the overlay view is changed by the application.
+ *
+ * <p>This is always called at least once when the session is created regardless of whether
+ * the overlay view is enabled or not. The overlay view size is the same as the containing
+ * {@link TvView}. Note that the size of the underlying surface can be different if the
+ * surface was changed by calling {@link #layoutSurface}.
*
* @param width The width of the overlay view.
* @param height The height of the overlay view.
- * @hide
*/
- @SystemApi
public void onOverlayViewSizeChanged(int width, int height) {
}
/**
- * Sets the relative stream volume of the current TV input session to handle the change of
- * audio focus by setting.
+ * Sets the relative stream volume of the current TV input session.
+ *
+ * <p>The implementation should honor this request in order to handle audio focus changes or
+         * mute the current session when multiple sessions, possibly from different inputs, are
+ * active. If the method has not yet been called, the implementation should assume the
+ * default value of {@code 1.0f}.
*
- * @param volume Volume scale from 0.0 to 1.0.
+         * @param volume A volume value between {@code 0.0f} and {@code 1.0f}.
*/
public abstract void onSetStreamVolume(float volume);
/**
* Tunes to a given channel. When the video is available, {@link #notifyVideoAvailable()}
- * should be called. Also, {@link #notifyVideoUnavailable(int)} should be called when the
- * TV input cannot continue playing the given channel.
+ * should be called. Also, {@link #notifyVideoUnavailable(int)} should be called when the TV
+ * input cannot continue playing the given channel.
*
* @param channelUri The URI of the channel.
- * @return {@code true} the tuning was successful, {@code false} otherwise.
+ * @return {@code true} if the tuning was successful, {@code false} otherwise.
*/
public abstract boolean onTune(Uri channelUri);
@@ -673,7 +772,7 @@ public abstract class TvInputService extends Service {
*
* @param channelUri The URI of the channel.
* @param params The extra parameters from other applications.
- * @return {@code true} the tuning was successful, {@code false} otherwise.
+ * @return {@code true} if the tuning was successful, {@code false} otherwise.
* @hide
*/
@SystemApi
@@ -683,8 +782,8 @@ public abstract class TvInputService extends Service {
/**
* Enables or disables the caption.
- * <p>
- * The locale for the user's preferred captioning language can be obtained by calling
+ *
+ * <p>The locale for the user's preferred captioning language can be obtained by calling
* {@link CaptioningManager#getLocale CaptioningManager.getLocale()}.
*
* @param enabled {@code true} to enable, {@code false} to disable.
@@ -694,14 +793,13 @@ public abstract class TvInputService extends Service {
/**
* Requests to unblock the content according to the given rating.
- * <p>
- * The implementation should unblock the content.
+ *
+ * <p>The implementation should unblock the content.
* TV input service has responsibility to decide when/how the unblock expires
* while it can keep previously unblocked ratings in order not to ask a user
* to unblock whenever a content rating is changed.
* Therefore an unblocked rating can be valid for a channel, a program,
* or certain amount of time depending on the implementation.
- * </p>
*
* @param unblockedRating An unblocked content rating
*/
@@ -709,20 +807,21 @@ public abstract class TvInputService extends Service {
}
/**
- * Select a given track.
- * <p>
- * If this is done successfully, the implementation should call {@link #notifyTrackSelected}
- * to help applications maintain the selcted track lists.
- * </p>
+ * Selects a given track.
+ *
+ * <p>If this is done successfully, the implementation should call
+ * {@link #notifyTrackSelected} to help applications maintain the up-to-date list of the
+ * selected tracks.
*
* @param trackId The ID of the track to select. {@code null} means to unselect the current
* track for a given type.
* @param type The type of the track to select. The type can be
* {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
* {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @return {@code true} if the track selection was successful, {@code false} otherwise.
* @see #notifyTrackSelected
*/
- public boolean onSelectTrack(int type, String trackId) {
+ public boolean onSelectTrack(int type, @Nullable String trackId) {
return false;
}
@@ -738,11 +837,11 @@ public abstract class TvInputService extends Service {
* @hide
*/
@SystemApi
- public void onAppPrivateCommand(String action, Bundle data) {
+ public void onAppPrivateCommand(@NonNull String action, Bundle data) {
}
/**
- * Called when an application requests to create an overlay view. Each session
+ * Called when the application requests to create an overlay view. Each session
* implementation can override this method and return its own view.
*
* @return a view attached to the overlay window
@@ -752,13 +851,106 @@ public abstract class TvInputService extends Service {
}
/**
+ * Called when the application requests to pause playback.
+ *
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackParams
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftPause() {
+ }
+
+ /**
+ * Called when the application requests to resume playback.
+ *
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackParams
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftResume() {
+ }
+
+ /**
+ * Called when the application requests to seek to a specified time position. Normally, the
+ * position is given within range between the start and the current time, inclusively. The
+ * implementation is expected to seek to the nearest time position if the given position is
+ * not in the range.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSetPlaybackParams
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftSeekTo(long timeMs) {
+ }
+
+ /**
+ * Called when the application sets playback parameters containing the speed and audio mode.
+ *
+ * <p>Once the playback parameters are set, the implementation should honor the current
+ * settings until the next tune request. Pause/resume/seek request does not reset the
+ * parameters previously set.
+ *
+ * @param params The playback params.
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftSetPlaybackParams(PlaybackParams params) {
+ }
+
+ /**
+ * Returns the start playback position for time shifting, in milliseconds since the epoch.
+ * Returns {@link TvInputManager#TIME_SHIFT_INVALID_TIME} if the position is unknown at the
+ * moment.
+ *
+ * <p>The start playback position of the time shifted program should be adjusted when the
+ * implementation cannot retain the whole recorded program due to some reason (e.g.
+         * limitation on storage space). It is the earliest possible time position that the user can
+         * seek to, thus failure to notify of its change immediately might result in a bad experience
+         * where the application allows the user to seek to an invalid time position.
+ *
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackParams
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public long onTimeShiftGetStartPosition() {
+ return TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+
+ /**
+ * Returns the current playback position for time shifting, in milliseconds since the epoch.
+ * Returns {@link TvInputManager#TIME_SHIFT_INVALID_TIME} if the position is unknown at the
+ * moment.
+ *
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackParams
+ * @see #onTimeShiftGetStartPosition
+ */
+ public long onTimeShiftGetCurrentPosition() {
+ return TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+
+ /**
* Default implementation of {@link android.view.KeyEvent.Callback#onKeyDown(int, KeyEvent)
* KeyEvent.Callback.onKeyDown()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept key down events before they are processed by the application.
- * If you return true, the application will not process the event itself. If you return
- * false, the normal application processing will occur as if the TV input had not seen the
- * event at all.
+ *
+ * <p>Override this to intercept key down events before they are processed by the
+ * application. If you return true, the application will not process the event itself. If
+ * you return false, the normal application processing will occur as if the TV input had not
+ * seen the event at all.
*
* @param keyCode The value in event.getKeyCode().
* @param event Description of the key event.
@@ -774,8 +966,8 @@ public abstract class TvInputService extends Service {
* Default implementation of
* {@link android.view.KeyEvent.Callback#onKeyLongPress(int, KeyEvent)
* KeyEvent.Callback.onKeyLongPress()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept key long press events before they are processed by the
+ *
+ * <p>Override this to intercept key long press events before they are processed by the
* application. If you return true, the application will not process the event itself. If
* you return false, the normal application processing will occur as if the TV input had not
* seen the event at all.
@@ -794,11 +986,11 @@ public abstract class TvInputService extends Service {
* Default implementation of
* {@link android.view.KeyEvent.Callback#onKeyMultiple(int, int, KeyEvent)
* KeyEvent.Callback.onKeyMultiple()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept special key multiple events before they are processed by the
- * application. If you return true, the application will not itself process the event. If
- * you return false, the normal application processing will occur as if the TV input had not
- * seen the event at all.
+ *
+ * <p>Override this to intercept special key multiple events before they are processed by
+ * the application. If you return true, the application will not itself process the event.
+ * If you return false, the normal application processing will occur as if the TV input had
+ * not seen the event at all.
*
* @param keyCode The value in event.getKeyCode().
* @param count The number of times the action was made.
@@ -814,9 +1006,9 @@ public abstract class TvInputService extends Service {
/**
* Default implementation of {@link android.view.KeyEvent.Callback#onKeyUp(int, KeyEvent)
* KeyEvent.Callback.onKeyUp()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept key up events before they are processed by the application. If
- * you return true, the application will not itself process the event. If you return false,
+ *
+ * <p>Override this to intercept key up events before they are processed by the application.
+ * If you return true, the application will not itself process the event. If you return false,
* the normal application processing will occur as if the TV input had not seen the event at
* all.
*
@@ -883,6 +1075,7 @@ public abstract class TvInputService extends Service {
// Removes the overlay view lastly so that any hanging on the main thread can be handled
// in {@link #scheduleOverlayViewCleanup}.
removeOverlayView(true);
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
}
/**
@@ -926,6 +1119,7 @@ public abstract class TvInputService extends Service {
* Calls {@link #onTune}.
*/
void tune(Uri channelUri, Bundle params) {
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
onTune(channelUri, params);
// TODO: Handle failure.
}
@@ -963,7 +1157,7 @@ public abstract class TvInputService extends Service {
* Creates an overlay view. This calls {@link #onCreateOverlayView} to get a view to attach
* to the overlay window.
*
- * @param windowToken A window token of an application.
+ * @param windowToken A window token of the application.
* @param frame A position of the overlay view.
*/
void createOverlayView(IBinder windowToken, Rect frame) {
@@ -1055,6 +1249,49 @@ public abstract class TvInputService extends Service {
}
/**
+ * Calls {@link #onTimeShiftPause}.
+ */
+ void timeShiftPause() {
+ onTimeShiftPause();
+ }
+
+ /**
+ * Calls {@link #onTimeShiftResume}.
+ */
+ void timeShiftResume() {
+ onTimeShiftResume();
+ }
+
+ /**
+ * Calls {@link #onTimeShiftSeekTo}.
+ */
+ void timeShiftSeekTo(long timeMs) {
+ onTimeShiftSeekTo(timeMs);
+ }
+
+ /**
+ * Calls {@link #onTimeShiftSetPlaybackParams}.
+ */
+ void timeShiftSetPlaybackParams(PlaybackParams params) {
+ onTimeShiftSetPlaybackParams(params);
+ }
+
+ /**
+ * Enable/disable position tracking.
+ *
+ * @param enable {@code true} to enable tracking, {@code false} otherwise.
+ */
+ void timeShiftEnablePositionTracking(boolean enable) {
+ if (enable) {
+ mHandler.post(mTimeShiftPositionTrackingRunnable);
+ } else {
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
+ mStartPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+ }
+
+ /**
* Schedules a task which checks whether the overlay view is detached and kills the process
* if it is not. Note that this method is expected to be called in a non-main thread.
*/
@@ -1073,12 +1310,19 @@ public abstract class TvInputService extends Service {
int dispatchInputEvent(InputEvent event, InputEventReceiver receiver) {
if (DEBUG) Log.d(TAG, "dispatchInputEvent(" + event + ")");
boolean isNavigationKey = false;
+ boolean skipDispatchToOverlayView = false;
if (event instanceof KeyEvent) {
KeyEvent keyEvent = (KeyEvent) event;
- isNavigationKey = isNavigationKey(keyEvent.getKeyCode());
if (keyEvent.dispatch(this, mDispatcherState, this)) {
return TvInputManager.Session.DISPATCH_HANDLED;
}
+ isNavigationKey = isNavigationKey(keyEvent.getKeyCode());
+ // When media keys and KEYCODE_MEDIA_AUDIO_TRACK are dispatched to ViewRootImpl,
+ // ViewRootImpl always consumes the keys. In this case, the application loses
+ // a chance to handle media keys. Therefore, media keys are not dispatched to
+ // ViewRootImpl.
+ skipDispatchToOverlayView = KeyEvent.isMediaKey(keyEvent.getKeyCode())
+ || keyEvent.getKeyCode() == KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK;
} else if (event instanceof MotionEvent) {
MotionEvent motionEvent = (MotionEvent) event;
final int source = motionEvent.getSource();
@@ -1096,7 +1340,8 @@ public abstract class TvInputService extends Service {
}
}
}
- if (mOverlayViewContainer == null || !mOverlayViewContainer.isAttachedToWindow()) {
+ if (mOverlayViewContainer == null || !mOverlayViewContainer.isAttachedToWindow()
+ || skipDispatchToOverlayView) {
return TvInputManager.Session.DISPATCH_NOT_HANDLED;
}
if (!mOverlayViewContainer.hasWindowFocus()) {
@@ -1142,12 +1387,31 @@ public abstract class TvInputService extends Service {
}
}
+ private final class TimeShiftPositionTrackingRunnable implements Runnable {
+ @Override
+ public void run() {
+ long startPositionMs = onTimeShiftGetStartPosition();
+ if (mStartPositionMs != startPositionMs) {
+ mStartPositionMs = startPositionMs;
+ notifyTimeShiftStartPositionChanged(startPositionMs);
+ }
+ long currentPositionMs = onTimeShiftGetCurrentPosition();
+ if (mCurrentPositionMs != currentPositionMs) {
+ mCurrentPositionMs = currentPositionMs;
+ notifyTimeShiftCurrentPositionChanged(currentPositionMs);
+ }
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
+ mHandler.postDelayed(mTimeShiftPositionTrackingRunnable,
+ POSITION_UPDATE_INTERVAL_MS);
+ }
+ }
+
private final class OverlayViewCleanUpTask extends AsyncTask<View, Void, Void> {
@Override
protected Void doInBackground(View... views) {
View overlayViewParent = views[0];
try {
- Thread.sleep(DETACH_OVERLAY_VIEW_TIMEOUT);
+ Thread.sleep(DETACH_OVERLAY_VIEW_TIMEOUT_MS);
} catch (InterruptedException e) {
return null;
}
@@ -1167,8 +1431,8 @@ public abstract class TvInputService extends Service {
/**
* Base class for a TV input session which represents an external device connected to a
* hardware TV input.
- * <p>
- * This class is for an input which provides channels for the external set-top box to the
+ *
+ * <p>This class is for an input which provides channels for the external set-top box to the
* application. Once a TV input returns an implementation of this class on
* {@link #onCreateSession(String)}, the framework will create a separate session for
* a hardware TV Input (e.g. HDMI 1) and forward the application's surface to the session so
@@ -1176,9 +1440,10 @@ public abstract class TvInputService extends Service {
* this TV input. The implementation of this class is expected to change the channel of the
* external set-top box via a proprietary protocol when {@link HardwareSession#onTune(Uri)} is
* requested by the application.
- * </p><p>
- * Note that this class is not for inputs for internal hardware like built-in tuner and HDMI 1.
- * </p>
+ *
+     * <p>Note that this class is not for inputs for internal hardware like a built-in tuner or
+     * HDMI 1.
+ *
* @see #onCreateSession(String)
*/
public abstract static class HardwareSession extends Session {
@@ -1199,12 +1464,11 @@ public abstract class TvInputService extends Service {
/**
* Returns the hardware TV input ID the external device is connected to.
- * <p>
- * TV input is expected to provide {@link android.R.attr#setupActivity} so that
+ *
+ * <p>TV input is expected to provide {@link android.R.attr#setupActivity} so that
* the application can launch it before using this TV input. The setup activity may let
* the user select the hardware TV input to which the external device is connected. The ID
* of the selected one should be stored in the TV input so that it can be returned here.
- * </p>
*/
public abstract String getHardwareInputId();
@@ -1219,6 +1483,8 @@ public abstract class TvInputService extends Service {
args.arg2 = mProxySession;
args.arg3 = mProxySessionCallback;
args.arg4 = session.getToken();
+ session.tune(TvContract.buildChannelUriForPassthroughInput(
+ getHardwareInputId()));
} else {
args.arg1 = null;
args.arg2 = null;
@@ -1228,7 +1494,6 @@ public abstract class TvInputService extends Service {
}
mServiceHandler.obtainMessage(ServiceHandler.DO_NOTIFY_SESSION_CREATED, args)
.sendToTarget();
- session.tune(TvContract.buildChannelUriForPassthroughInput(getHardwareInputId()));
}
@Override
@@ -1273,6 +1538,7 @@ public abstract class TvInputService extends Service {
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
*/
public void onHardwareVideoUnavailable(int reason) { }
@@ -1313,7 +1579,7 @@ public abstract class TvInputService extends Service {
try {
mCallbacks.getBroadcastItem(i).addHardwareTvInput(deviceId, inputInfo);
} catch (RemoteException e) {
- Log.e(TAG, "Error while broadcasting.", e);
+ Log.e(TAG, "error in broadcastAddHardwareTvInput", e);
}
}
mCallbacks.finishBroadcast();
@@ -1325,7 +1591,7 @@ public abstract class TvInputService extends Service {
try {
mCallbacks.getBroadcastItem(i).addHdmiTvInput(id, inputInfo);
} catch (RemoteException e) {
- Log.e(TAG, "Error while broadcasting.", e);
+ Log.e(TAG, "error in broadcastAddHdmiTvInput", e);
}
}
mCallbacks.finishBroadcast();
@@ -1337,7 +1603,7 @@ public abstract class TvInputService extends Service {
try {
mCallbacks.getBroadcastItem(i).removeTvInput(inputId);
} catch (RemoteException e) {
- Log.e(TAG, "Error while broadcasting.", e);
+ Log.e(TAG, "error in broadcastRemoveTvInput", e);
}
}
mCallbacks.finishBroadcast();
@@ -1358,7 +1624,7 @@ public abstract class TvInputService extends Service {
// Failed to create a session.
cb.onSessionCreated(null, null);
} catch (RemoteException e) {
- Log.e(TAG, "error in onSessionCreated");
+ Log.e(TAG, "error in onSessionCreated", e);
}
return;
}
@@ -1379,7 +1645,7 @@ public abstract class TvInputService extends Service {
try {
cb.onSessionCreated(null, null);
} catch (RemoteException e) {
- Log.e(TAG, "error in onSessionCreated");
+ Log.e(TAG, "error in onSessionCreated", e);
}
return;
}
@@ -1410,7 +1676,7 @@ public abstract class TvInputService extends Service {
try {
cb.onSessionCreated(stub, hardwareSessionToken);
} catch (RemoteException e) {
- Log.e(TAG, "error in onSessionCreated");
+ Log.e(TAG, "error in onSessionCreated", e);
}
if (sessionImpl != null) {
sessionImpl.initialize(cb);
diff --git a/media/java/android/media/tv/TvTrackInfo.java b/media/java/android/media/tv/TvTrackInfo.java
index e0aacd6..ed432c46 100644
--- a/media/java/android/media/tv/TvTrackInfo.java
+++ b/media/java/android/media/tv/TvTrackInfo.java
@@ -16,10 +16,13 @@
package android.media.tv;
+import android.annotation.NonNull;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
+import com.android.internal.util.Preconditions;
+
/**
* Encapsulates the format of tracks played in {@link TvInputService}.
*/
@@ -42,24 +45,28 @@ public final class TvTrackInfo implements Parcelable {
private final int mType;
private final String mId;
private final String mLanguage;
+ private final CharSequence mDescription;
private final int mAudioChannelCount;
private final int mAudioSampleRate;
private final int mVideoWidth;
private final int mVideoHeight;
private final float mVideoFrameRate;
+ private final float mVideoPixelAspectRatio;
private final Bundle mExtra;
- private TvTrackInfo(int type, String id, String language, int audioChannelCount,
- int audioSampleRate, int videoWidth, int videoHeight, float videoFrameRate,
- Bundle extra) {
+ private TvTrackInfo(int type, String id, String language, CharSequence description,
+ int audioChannelCount, int audioSampleRate, int videoWidth, int videoHeight,
+ float videoFrameRate, float videoPixelAspectRatio, Bundle extra) {
mType = type;
mId = id;
mLanguage = language;
+ mDescription = description;
mAudioChannelCount = audioChannelCount;
mAudioSampleRate = audioSampleRate;
mVideoWidth = videoWidth;
mVideoHeight = videoHeight;
mVideoFrameRate = videoFrameRate;
+ mVideoPixelAspectRatio = videoPixelAspectRatio;
mExtra = extra;
}
@@ -67,11 +74,13 @@ public final class TvTrackInfo implements Parcelable {
mType = in.readInt();
mId = in.readString();
mLanguage = in.readString();
+ mDescription = in.readString();
mAudioChannelCount = in.readInt();
mAudioSampleRate = in.readInt();
mVideoWidth = in.readInt();
mVideoHeight = in.readInt();
mVideoFrameRate = in.readFloat();
+ mVideoPixelAspectRatio = in.readFloat();
mExtra = in.readBundle();
}
@@ -99,6 +108,13 @@ public final class TvTrackInfo implements Parcelable {
}
/**
+ * Returns a user readable description for the current track.
+ */
+ public final CharSequence getDescription() {
+ return mDescription;
+ }
+
+ /**
* Returns the audio channel count. Valid only for {@link #TYPE_AUDIO} tracks.
*/
public final int getAudioChannelCount() {
@@ -152,6 +168,17 @@ public final class TvTrackInfo implements Parcelable {
}
/**
+ * Returns the pixel aspect ratio (the ratio of a pixel's width to its height) of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ */
+ public final float getVideoPixelAspectRatio() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoPixelAspectRatio;
+ }
+
+ /**
* Returns the extra information about the current track.
*/
public final Bundle getExtra() {
@@ -174,11 +201,13 @@ public final class TvTrackInfo implements Parcelable {
dest.writeInt(mType);
dest.writeString(mId);
dest.writeString(mLanguage);
+ dest.writeString(mDescription != null ? mDescription.toString() : null);
dest.writeInt(mAudioChannelCount);
dest.writeInt(mAudioSampleRate);
dest.writeInt(mVideoWidth);
dest.writeInt(mVideoHeight);
dest.writeFloat(mVideoFrameRate);
+ dest.writeFloat(mVideoPixelAspectRatio);
dest.writeBundle(mExtra);
}
@@ -202,11 +231,13 @@ public final class TvTrackInfo implements Parcelable {
private final String mId;
private final int mType;
private String mLanguage;
+ private CharSequence mDescription;
private int mAudioChannelCount;
private int mAudioSampleRate;
private int mVideoWidth;
private int mVideoHeight;
private float mVideoFrameRate;
+ private float mVideoPixelAspectRatio = 1.0f;
private Bundle mExtra;
/**
@@ -217,15 +248,13 @@ public final class TvTrackInfo implements Parcelable {
* @param id The ID of the track that uniquely identifies the current track among all the
* other tracks in the same TV program.
*/
- public Builder(int type, String id) {
+ public Builder(int type, @NonNull String id) {
if (type != TYPE_AUDIO
&& type != TYPE_VIDEO
&& type != TYPE_SUBTITLE) {
throw new IllegalArgumentException("Unknown type: " + type);
}
- if (id == null) {
- throw new IllegalArgumentException("id cannot be null");
- }
+ Preconditions.checkNotNull(id);
mType = type;
mId = id;
}
@@ -241,6 +270,16 @@ public final class TvTrackInfo implements Parcelable {
}
/**
+ * Sets a user readable description for the current track.
+ *
+ * @param description The user readable description.
+ */
+ public final Builder setDescription(CharSequence description) {
+ mDescription = description;
+ return this;
+ }
+
+ /**
* Sets the audio channel count. Valid only for {@link #TYPE_AUDIO} tracks.
*
* @param audioChannelCount The audio channel count.
@@ -310,6 +349,25 @@ public final class TvTrackInfo implements Parcelable {
}
/**
+ * Sets the pixel aspect ratio (the ratio of a pixel's width to its height) of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ *
+ * <p>This is needed for applications to be able to scale the video properly for some video
+ * formats such as 720x576 4:3 and 720x576 16:9 where pixels are not square. By default,
+ * applications assume the value of 1.0 (square pixels), so it is not necessary to set the
+ * pixel aspect ratio for most video formats.
+ *
+ * @param videoPixelAspectRatio The pixel aspect ratio of the video.
+ */
+ public final Builder setVideoPixelAspectRatio(float videoPixelAspectRatio) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoPixelAspectRatio = videoPixelAspectRatio;
+ return this;
+ }
+
+ /**
* Sets the extra information about the current track.
*
* @param extra The extra information.
@@ -325,8 +383,9 @@ public final class TvTrackInfo implements Parcelable {
* @return The new {@link TvTrackInfo} instance
*/
public TvTrackInfo build() {
- return new TvTrackInfo(mType, mId, mLanguage, mAudioChannelCount, mAudioSampleRate,
- mVideoWidth, mVideoHeight, mVideoFrameRate, mExtra);
+ return new TvTrackInfo(mType, mId, mLanguage, mDescription, mAudioChannelCount,
+ mAudioSampleRate, mVideoWidth, mVideoHeight, mVideoFrameRate,
+ mVideoPixelAspectRatio, mExtra);
}
}
-} \ No newline at end of file
+}
diff --git a/media/java/android/media/tv/TvView.java b/media/java/android/media/tv/TvView.java
index 6fc1b82..e7ce1dd 100644
--- a/media/java/android/media/tv/TvView.java
+++ b/media/java/android/media/tv/TvView.java
@@ -16,12 +16,15 @@
package android.media.tv;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.Region;
+import android.media.PlaybackParams;
import android.media.tv.TvInputManager.Session;
import android.media.tv.TvInputManager.Session.FinishedInputEventCallback;
import android.media.tv.TvInputManager.SessionCallback;
@@ -31,6 +34,7 @@ import android.os.Handler;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
+import android.util.Pair;
import android.view.InputEvent;
import android.view.KeyEvent;
import android.view.MotionEvent;
@@ -42,21 +46,22 @@ import android.view.ViewGroup;
import android.view.ViewRootImpl;
import java.lang.ref.WeakReference;
+import java.util.ArrayDeque;
import java.util.List;
+import java.util.Queue;
/**
* Displays TV contents. The TvView class provides a high level interface for applications to show
* TV programs from various TV sources that implement {@link TvInputService}. (Note that the list of
* TV inputs available on the system can be obtained by calling
* {@link TvInputManager#getTvInputList() TvInputManager.getTvInputList()}.)
- * <p>
- * Once the application supplies the URI for a specific TV channel to {@link #tune(String, Uri)}
+ *
+ * <p>Once the application supplies the URI for a specific TV channel to {@link #tune(String, Uri)}
* method, it takes care of underlying service binding (and unbinding if the current TvView is
* already bound to a service) and automatically allocates/deallocates resources needed. In addition
* to a few essential methods to control how the contents are presented, it also provides a way to
* dispatch input events to the connected TvInputService in order to enable custom key actions for
* the TV input.
- * </p>
*/
public class TvView extends ViewGroup {
private static final String TAG = "TvView";
@@ -66,10 +71,6 @@ public class TvView extends ViewGroup {
private static final int ZORDER_MEDIA_OVERLAY = 1;
private static final int ZORDER_ON_TOP = 2;
- private static final int CAPTION_DEFAULT = 0;
- private static final int CAPTION_ENABLED = 1;
- private static final int CAPTION_DISABLED = 2;
-
private static final WeakReference<TvView> NULL_TV_VIEW = new WeakReference<>(null);
private static final Object sMainTvViewLock = new Object();
@@ -85,11 +86,9 @@ public class TvView extends ViewGroup {
private MySessionCallback mSessionCallback;
private TvInputCallback mCallback;
private OnUnhandledInputEventListener mOnUnhandledInputEventListener;
- private boolean mHasStreamVolume;
- private float mStreamVolume;
- private int mCaptionEnabled;
- private String mAppPrivateCommandAction;
- private Bundle mAppPrivateCommandData;
+ private Float mStreamVolume;
+ private Boolean mCaptionEnabled;
+ private final Queue<Pair<String, Bundle>> mPendingAppPrivateCommands = new ArrayDeque<>();
private boolean mSurfaceChanged;
private int mSurfaceFormat;
@@ -103,6 +102,7 @@ public class TvView extends ViewGroup {
private int mSurfaceViewRight;
private int mSurfaceViewTop;
private int mSurfaceViewBottom;
+ private TimeShiftPositionCallback mTimeShiftPositionCallback;
private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
@Override
@@ -173,27 +173,26 @@ public class TvView extends ViewGroup {
/**
* Sets the callback to be invoked when an event is dispatched to this TvView.
*
- * @param callback The callback to receive events. A value of {@code null} removes any existing
- * callbacks.
+ * @param callback The callback to receive events. A value of {@code null} removes the existing
+ * callback.
*/
- public void setCallback(TvInputCallback callback) {
+ public void setCallback(@Nullable TvInputCallback callback) {
mCallback = callback;
}
/**
* Sets this as the main {@link TvView}.
- * <p>
- * The main {@link TvView} is a {@link TvView} whose corresponding TV input determines the
+ *
+ * <p>The main {@link TvView} is a {@link TvView} whose corresponding TV input determines the
* HDMI-CEC active source device. For an HDMI port input, one of source devices that is
* connected to that HDMI port becomes the active source. For an HDMI-CEC logical device input,
* the corresponding HDMI-CEC logical device becomes the active source. For any non-HDMI input
* (including the tuner, composite, S-Video, etc.), the internal device (= TV itself) becomes
* the active source.
- * </p><p>
- * First tuned {@link TvView} becomes main automatically, and keeps to be main until either
+ *
+ * <p>First tuned {@link TvView} becomes main automatically, and keeps to be main until either
* {@link #reset} is called for the main {@link TvView} or {@link #setMain} is called for other
* {@link TvView}.
- * </p>
* @hide
*/
@SystemApi
@@ -252,13 +251,16 @@ public class TvView extends ViewGroup {
}
/**
- * Sets the relative stream volume of this session to handle a change of audio focus.
+ * Sets the relative stream volume of this TvView.
*
- * @param volume A volume value between 0.0f to 1.0f.
+ * <p>This method is primarily used to handle audio focus changes or mute a specific TvView when
+ * multiple views are displayed. If the method has not yet been called, the TvView assumes the
+ * default value of {@code 1.0f}.
+ *
+ * @param volume A volume value between {@code 0.0f} to {@code 1.0f}.
*/
public void setStreamVolume(float volume) {
if (DEBUG) Log.d(TAG, "setStreamVolume(" + volume + ")");
- mHasStreamVolume = true;
mStreamVolume = volume;
if (mSession == null) {
// Volume will be set once the connection has been made.
@@ -273,7 +275,7 @@ public class TvView extends ViewGroup {
* @param inputId The ID of TV input which will play the given channel.
* @param channelUri The URI of a channel.
*/
- public void tune(String inputId, Uri channelUri) {
+ public void tune(@NonNull String inputId, Uri channelUri) {
tune(inputId, channelUri, null);
}
@@ -321,8 +323,8 @@ public class TvView extends ViewGroup {
/**
* Resets this TvView.
- * <p>
- * This method is primarily used to un-tune the current TvView.
+ *
+ * <p>This method is primarily used to un-tune the current TvView.
*/
public void reset() {
if (DEBUG) Log.d(TAG, "reset()");
@@ -343,30 +345,46 @@ public class TvView extends ViewGroup {
/**
* Requests to unblock TV content according to the given rating.
- * <p>
- * This notifies TV input that blocked content is now OK to play.
- * </p>
+ *
+ * <p>This notifies TV input that blocked content is now OK to play.
*
* @param unblockedRating A TvContentRating to unblock.
* @see TvInputService.Session#notifyContentBlocked(TvContentRating)
* @hide
+ * @deprecated Use {@link #unblockContent} instead.
*/
+ @Deprecated
@SystemApi
public void requestUnblockContent(TvContentRating unblockedRating) {
+ unblockContent(unblockedRating);
+ }
+
+ /**
+ * Requests to unblock TV content according to the given rating.
+ *
+ * <p>This notifies TV input that blocked content is now OK to play.
+ *
+ * @param unblockedRating A TvContentRating to unblock.
+ * @see TvInputService.Session#notifyContentBlocked(TvContentRating)
+ * @hide
+ */
+ @SystemApi
+ public void unblockContent(TvContentRating unblockedRating) {
if (mSession != null) {
- mSession.requestUnblockContent(unblockedRating);
+ mSession.unblockContent(unblockedRating);
}
}
/**
* Enables or disables the caption in this TvView.
- * <p>
- * Note that this method does not take any effect unless the current TvView is tuned.
+ *
+ * <p>Note that this method does not take any effect unless the current TvView is tuned.
*
* @param enabled {@code true} to enable, {@code false} to disable.
*/
public void setCaptionEnabled(boolean enabled) {
- mCaptionEnabled = enabled ? CAPTION_ENABLED : CAPTION_DISABLED;
+ if (DEBUG) Log.d(TAG, "setCaptionEnabled(" + enabled + ")");
+ mCaptionEnabled = enabled;
if (mSession != null) {
mSession.setCaptionEnabled(enabled);
}
@@ -420,6 +438,66 @@ public class TvView extends ViewGroup {
}
/**
+ * Pauses playback. No-op if it is already paused. Call {@link #timeShiftResume} to resume.
+ */
+ public void timeShiftPause() {
+ if (mSession != null) {
+ mSession.timeShiftPause();
+ }
+ }
+
+ /**
+ * Resumes playback. No-op if it is already resumed. Call {@link #timeShiftPause} to pause.
+ */
+ public void timeShiftResume() {
+ if (mSession != null) {
+ mSession.timeShiftResume();
+ }
+ }
+
+ /**
+ * Seeks to a specified time position. {@code timeMs} must be equal to or greater than the start
+ * position returned by {@link TimeShiftPositionCallback#onTimeShiftStartPositionChanged} and
+ * equal to or less than the current time.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ */
+ public void timeShiftSeekTo(long timeMs) {
+ if (mSession != null) {
+ mSession.timeShiftSeekTo(timeMs);
+ }
+ }
+
+ /**
+ * Sets playback rate using {@link android.media.PlaybackParams}.
+ *
+ * @param params The playback params.
+ */
+ public void timeShiftSetPlaybackParams(@NonNull PlaybackParams params) {
+ if (mSession != null) {
+ mSession.timeShiftSetPlaybackParams(params);
+ }
+ }
+
+ /**
+ * Sets the callback to be invoked when the time shift position is changed.
+ *
+ * @param callback The callback to receive time shift position changes. A value of {@code null}
+ * removes the existing callback.
+ */
+ public void setTimeShiftPositionCallback(@Nullable TimeShiftPositionCallback callback) {
+ mTimeShiftPositionCallback = callback;
+ ensurePositionTracking();
+ }
+
+ private void ensurePositionTracking() {
+ if (mSession == null) {
+ return;
+ }
+ mSession.timeShiftEnablePositionTracking(mTimeShiftPositionCallback != null);
+ }
+
+ /**
* Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
* TvInputService.Session.appPrivateCommand()} on the current TvView.
*
@@ -430,26 +508,23 @@ public class TvView extends ViewGroup {
* @hide
*/
@SystemApi
- public void sendAppPrivateCommand(String action, Bundle data) {
+ public void sendAppPrivateCommand(@NonNull String action, Bundle data) {
if (TextUtils.isEmpty(action)) {
throw new IllegalArgumentException("action cannot be null or an empty string");
}
if (mSession != null) {
mSession.sendAppPrivateCommand(action, data);
} else {
- Log.w(TAG, "sendAppPrivateCommand - session not created (action " + action + " cached)");
- if (mAppPrivateCommandAction != null) {
- Log.w(TAG, "previous cached action " + action + " removed");
- }
- mAppPrivateCommandAction = action;
- mAppPrivateCommandData = data;
+ Log.w(TAG, "sendAppPrivateCommand - session not yet created (action \"" + action
+ + "\" pending)");
+ mPendingAppPrivateCommands.add(Pair.create(action, data));
}
}
/**
* Dispatches an unhandled input event to the next receiver.
- * <p>
- * Except system keys, TvView always consumes input events in the normal flow. This is called
+ *
+ * <p>Except system keys, TvView always consumes input events in the normal flow. This is called
* asynchronously from where the event is dispatched. It gives the host application a chance to
* dispatch the unhandled input events.
*
@@ -664,8 +739,7 @@ public class TvView extends ViewGroup {
}
private void release() {
- mAppPrivateCommandAction = null;
- mAppPrivateCommandData = null;
+ mPendingAppPrivateCommands.clear();
setSessionSurface(null);
removeSessionOverlayView();
@@ -729,6 +803,43 @@ public class TvView extends ViewGroup {
}
/**
+ * Callback used to receive time shift position changes.
+ */
+ public abstract static class TimeShiftPositionCallback {
+
+ /**
+ * This is called when the start playback position is changed.
+ *
+ * <p>The start playback position of the time shifted program can be adjusted by the TV
+ * input when it cannot retain the whole recorded program due to some reason (e.g.
+ * limitation on storage space). The application should not allow the user to seek to a
+ * position earlier than the start position.
+ *
+ * <p>Note that {@code timeMs} is not relative time in the program but wall-clock time,
+ * which is intended to avoid calling this method unnecessarily around program boundaries.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param timeMs The start playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftStartPositionChanged(String inputId, long timeMs) {
+ }
+
+ /**
+ * This is called when the current playback position is changed.
+ *
+ * <p>Note that {@code timeMs} is not relative time in the program but wall-clock time,
+ * which is intended to avoid calling this method unnecessarily around program boundaries.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param timeMs The current playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftCurrentPositionChanged(String inputId, long timeMs) {
+ }
+ }
+
+ /**
* Callback used to receive various status updates on the {@link TvView}.
*/
public abstract static class TvInputCallback {
@@ -811,6 +922,7 @@ public class TvView extends ViewGroup {
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
*/
public void onVideoUnavailable(String inputId, int reason) {
@@ -838,6 +950,7 @@ public class TvView extends ViewGroup {
/**
* This is invoked when a custom event from the bound TV input is sent to this view.
*
+ * @param inputId The ID of the TV input bound to this view.
* @param eventType The type of the event.
* @param eventArgs Optional arguments of the event.
* @hide
@@ -845,6 +958,20 @@ public class TvView extends ViewGroup {
@SystemApi
public void onEvent(String inputId, String eventType, Bundle eventArgs) {
}
+
+ /**
+ * This is called when the time shift status is changed.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param status The current time shift status. Should be one of the followings.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void onTimeShiftStatusChanged(String inputId, int status) {
+ }
}
/**
@@ -853,8 +980,8 @@ public class TvView extends ViewGroup {
public interface OnUnhandledInputEventListener {
/**
* Called when an input event was not handled by the bound TV input.
- * <p>
- * This is called asynchronously from where the event is dispatched. It gives the host
+ *
+ * <p>This is called asynchronously from where the event is dispatched. It gives the host
* application a chance to handle the unhandled input events.
*
* @param event The input event.
@@ -890,6 +1017,12 @@ public class TvView extends ViewGroup {
}
mSession = session;
if (session != null) {
+ // Sends the pending app private commands first.
+ for (Pair<String, Bundle> command : mPendingAppPrivateCommands) {
+ mSession.sendAppPrivateCommand(command.first, command.second);
+ }
+ mPendingAppPrivateCommands.clear();
+
synchronized (sMainTvViewLock) {
if (hasWindowFocus() && TvView.this == sMainTvView.get()) {
mSession.setMain();
@@ -905,19 +1038,14 @@ public class TvView extends ViewGroup {
}
}
createSessionOverlayView();
- if (mCaptionEnabled != CAPTION_DEFAULT) {
- mSession.setCaptionEnabled(mCaptionEnabled == CAPTION_ENABLED);
- }
- mSession.tune(mChannelUri, mTuneParams);
- if (mHasStreamVolume) {
+ if (mStreamVolume != null) {
mSession.setStreamVolume(mStreamVolume);
}
- if (mAppPrivateCommandAction != null) {
- mSession.sendAppPrivateCommand(
- mAppPrivateCommandAction, mAppPrivateCommandData);
- mAppPrivateCommandAction = null;
- mAppPrivateCommandData = null;
+ if (mCaptionEnabled != null) {
+ mSession.setCaptionEnabled(mCaptionEnabled);
}
+ mSession.tune(mChannelUri, mTuneParams);
+ ensurePositionTracking();
} else {
mSessionCallback = null;
if (mCallback != null) {
@@ -1087,5 +1215,47 @@ public class TvView extends ViewGroup {
mCallback.onEvent(mInputId, eventType, eventArgs);
}
}
+
+ @Override
+ public void onTimeShiftStatusChanged(Session session, int status) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftStatusChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftStatusChanged - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onTimeShiftStatusChanged(mInputId, status);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStartPositionChanged(Session session, long timeMs) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftStartPositionChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftStartPositionChanged - session not created");
+ return;
+ }
+ if (mTimeShiftPositionCallback != null) {
+ mTimeShiftPositionCallback.onTimeShiftStartPositionChanged(mInputId, timeMs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(Session session, long timeMs) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftCurrentPositionChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftCurrentPositionChanged - session not created");
+ return;
+ }
+ if (mTimeShiftPositionCallback != null) {
+ mTimeShiftPositionCallback.onTimeShiftCurrentPositionChanged(mInputId, timeMs);
+ }
+ }
}
}
diff --git a/media/java/android/mtp/MtpDatabase.java b/media/java/android/mtp/MtpDatabase.java
index 5d9355a..3541fba 100755
--- a/media/java/android/mtp/MtpDatabase.java
+++ b/media/java/android/mtp/MtpDatabase.java
@@ -28,7 +28,6 @@ import android.database.sqlite.SQLiteDatabase;
import android.media.MediaScanner;
import android.net.Uri;
import android.os.BatteryManager;
-import android.os.BatteryStats;
import android.os.RemoteException;
import android.provider.MediaStore;
import android.provider.MediaStore.Audio;
diff --git a/media/java/android/mtp/MtpStorage.java b/media/java/android/mtp/MtpStorage.java
index e20eabc..3641ff5 100644
--- a/media/java/android/mtp/MtpStorage.java
+++ b/media/java/android/mtp/MtpStorage.java
@@ -38,7 +38,7 @@ public class MtpStorage {
public MtpStorage(StorageVolume volume, Context context) {
mStorageId = volume.getStorageId();
mPath = volume.getPath();
- mDescription = context.getResources().getString(volume.getDescriptionId());
+ mDescription = volume.getDescription(context);
mReserveSpace = volume.getMtpReserveSpace() * 1024L * 1024L;
mRemovable = volume.isRemovable();
mMaxFileSize = volume.getMaxFileSize();
@@ -59,7 +59,7 @@ public class MtpStorage {
*
* @return the storage ID
*/
- public static int getStorageId(int index) {
+ public static int getStorageIdForIndex(int index) {
// storage ID is 0x00010001 for primary storage,
// then 0x00020001, 0x00030001, etc. for secondary storages
return ((index + 1) << 16) + 1;
diff --git a/media/java/android/service/media/IMediaBrowserService.aidl b/media/java/android/service/media/IMediaBrowserService.aidl
index 01285ee..f01fc07 100644
--- a/media/java/android/service/media/IMediaBrowserService.aidl
+++ b/media/java/android/service/media/IMediaBrowserService.aidl
@@ -6,6 +6,7 @@ import android.content.res.Configuration;
import android.service.media.IMediaBrowserServiceCallbacks;
import android.net.Uri;
import android.os.Bundle;
+import android.os.ResultReceiver;
/**
* Media API allows clients to browse through hierarchy of a user’s media collection,
@@ -18,4 +19,5 @@ oneway interface IMediaBrowserService {
void addSubscription(String uri, IMediaBrowserServiceCallbacks callbacks);
void removeSubscription(String uri, IMediaBrowserServiceCallbacks callbacks);
+ void getMediaItem(String uri, in ResultReceiver cb);
} \ No newline at end of file
diff --git a/media/java/android/service/media/MediaBrowserService.java b/media/java/android/service/media/MediaBrowserService.java
index 26aedbd..41156cb 100644
--- a/media/java/android/service/media/MediaBrowserService.java
+++ b/media/java/android/service/media/MediaBrowserService.java
@@ -16,7 +16,6 @@
package android.service.media;
-import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.SdkConstant;
@@ -32,8 +31,10 @@ import android.os.Bundle;
import android.os.IBinder;
import android.os.Handler;
import android.os.RemoteException;
+import android.os.ResultReceiver;
import android.service.media.IMediaBrowserService;
import android.service.media.IMediaBrowserServiceCallbacks;
+import android.text.TextUtils;
import android.util.ArrayMap;
import android.util.Log;
@@ -74,6 +75,13 @@ public abstract class MediaBrowserService extends Service {
@SdkConstant(SdkConstantType.SERVICE_ACTION)
public static final String SERVICE_INTERFACE = "android.media.browse.MediaBrowserService";
+ /**
+ * A key for passing the MediaItem to the ResultReceiver in getMediaItem.
+ *
+ * @hide
+ */
+ public static final String KEY_MEDIA_ITEM = "media_item";
+
private final ArrayMap<IBinder, ConnectionRecord> mConnections = new ArrayMap();
private final Handler mHandler = new Handler();
private ServiceBinder mBinder;
@@ -261,6 +269,33 @@ public abstract class MediaBrowserService extends Service {
}
});
}
+
+ @Override
+ public void getMediaItem(final String mediaId, final ResultReceiver receiver) {
+ if (TextUtils.isEmpty(mediaId) || receiver == null) {
+ return;
+ }
+
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ final Result<MediaBrowser.MediaItem> result
+ = new Result<MediaBrowser.MediaItem>(mediaId) {
+ @Override
+ void onResultSent(MediaBrowser.MediaItem item) {
+ Bundle bundle = new Bundle();
+ bundle.putParcelable(KEY_MEDIA_ITEM, item);
+ receiver.send(0, bundle);
+ }
+ };
+ try {
+ MediaBrowserService.this.getMediaItem(mediaId, result);
+ } catch (UnsupportedOperationException e) {
+ receiver.send(-1, null);
+ }
+ }
+ });
+ }
}
@Override
@@ -284,20 +319,21 @@ public abstract class MediaBrowserService extends Service {
/**
* Called to get the root information for browsing by a particular client.
* <p>
- * The implementation should verify that the client package has
- * permission to access browse media information before returning
- * the root id; it should return null if the client is not
- * allowed to access this information.
+ * The implementation should verify that the client package has permission
+ * to access browse media information before returning the root id; it
+ * should return null if the client is not allowed to access this
+ * information.
* </p>
*
- * @param clientPackageName The package name of the application
- * which is requesting access to browse media.
- * @param clientUid The uid of the application which is requesting
- * access to browse media.
+ * @param clientPackageName The package name of the application which is
+ * requesting access to browse media.
+ * @param clientUid The uid of the application which is requesting access to
+ * browse media.
* @param rootHints An optional bundle of service-specific arguments to send
- * to the media browse service when connecting and retrieving the root id
- * for browsing, or null if none. The contents of this bundle may affect
- * the information returned when browsing.
+ * to the media browse service when connecting and retrieving the
+ * root id for browsing, or null if none. The contents of this
+ * bundle may affect the information returned when browsing.
+ * @return The {@link BrowserRoot} for accessing this app's content or null.
*/
public abstract @Nullable BrowserRoot onGetRoot(@NonNull String clientPackageName,
int clientUid, @Nullable Bundle rootHints);
@@ -305,24 +341,51 @@ public abstract class MediaBrowserService extends Service {
/**
* Called to get information about the children of a media item.
* <p>
- * Implementations must call result.{@link Result#sendResult result.sendResult} with the list
- * of children. If loading the children will be an expensive operation that should be performed
- * on another thread, result.{@link Result#detach result.detach} may be called before returning
- * from this function, and then {@link Result#sendResult result.sendResult} called when
- * the loading is complete.
+ * Implementations must call {@link Result#sendResult result.sendResult}
+ * with the list of children. If loading the children will be an expensive
+ * operation that should be performed on another thread,
+ * {@link Result#detach result.detach} may be called before returning from
+ * this function, and then {@link Result#sendResult result.sendResult}
+ * called when the loading is complete.
*
- * @param parentId The id of the parent media item whose
- * children are to be queried.
- * @return The list of children, or null if the id is invalid.
+ * @param parentId The id of the parent media item whose children are to be
+ * queried.
+ * @param result The Result to send the list of children to. Send a null
+ *            list if the id is invalid.
*/
public abstract void onLoadChildren(@NonNull String parentId,
@NonNull Result<List<MediaBrowser.MediaItem>> result);
/**
+ * Called to get a specific media item. The mediaId should be the same id
+ * that would be returned for this item when it is in a list of child items.
+ * <p>
+ * Implementations must call {@link Result#sendResult result.sendResult}. If
+ * loading the item will be an expensive operation {@link Result#detach
+ * result.detach} may be called before returning from this function, and
+ * then {@link Result#sendResult result.sendResult} called when the item has
+ * been loaded.
+ * <p>
+ * The default implementation throws an exception.
+ *
+ * @param mediaId The id for the specific
+ * {@link android.media.browse.MediaBrowser.MediaItem}.
+ * @param result The Result to send the item to, or null if the id is
+ * invalid.
+ * @throws UnsupportedOperationException
+ */
+ public void getMediaItem(String mediaId, Result<MediaBrowser.MediaItem> result)
+ throws UnsupportedOperationException {
+ throw new UnsupportedOperationException("getMediaItem is not supported.");
+ }
+
+ /**
* Call to set the media session.
* <p>
* This should be called as soon as possible during the service's startup.
* It may only be called once.
+ *
+ * @param token The token for the service's {@link MediaSession}.
*/
public void setSessionToken(final MediaSession.Token token) {
if (token == null) {
@@ -405,12 +468,10 @@ public abstract class MediaBrowserService extends Service {
*/
private void addSubscription(String id, ConnectionRecord connection) {
// Save the subscription
- final boolean added = connection.subscriptions.add(id);
+ connection.subscriptions.add(id);
- // If this is a new subscription, send the results
- if (added) {
- performLoadChildren(id, connection);
- }
+ // send the results
+ performLoadChildren(id, connection);
}
/**
diff --git a/media/jni/Android.mk b/media/jni/Android.mk
index 4ebbe26..51d0140 100644
--- a/media/jni/Android.mk
+++ b/media/jni/Android.mk
@@ -2,21 +2,25 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
+ android_media_AmrInputStream.cpp \
+ android_media_ImageWriter.cpp \
android_media_ImageReader.cpp \
android_media_MediaCrypto.cpp \
android_media_MediaCodec.cpp \
android_media_MediaCodecList.cpp \
+ android_media_MediaDataSource.cpp \
android_media_MediaDrm.cpp \
android_media_MediaExtractor.cpp \
android_media_MediaHTTPConnection.cpp \
+ android_media_MediaMetadataRetriever.cpp \
android_media_MediaMuxer.cpp \
android_media_MediaPlayer.cpp \
+ android_media_MediaProfiles.cpp \
android_media_MediaRecorder.cpp \
android_media_MediaScanner.cpp \
- android_media_MediaMetadataRetriever.cpp \
+ android_media_MediaSync.cpp \
android_media_ResampleInputStream.cpp \
- android_media_MediaProfiles.cpp \
- android_media_AmrInputStream.cpp \
+ android_media_SyncParams.cpp \
android_media_Utils.cpp \
android_mtp_MtpDatabase.cpp \
android_mtp_MtpDevice.cpp \
@@ -40,7 +44,7 @@ LOCAL_SHARED_LIBRARIES := \
libusbhost \
libjhead \
libexif \
- libstagefright_amrnb_common \
+ libstagefright_amrnb_common
LOCAL_REQUIRED_MODULES := \
libjhead_jni
@@ -52,6 +56,7 @@ LOCAL_C_INCLUDES += \
external/libexif/ \
external/tremor/Tremor \
frameworks/base/core/jni \
+ frameworks/base/libs/hwui \
frameworks/av/media/libmedia \
frameworks/av/media/libstagefright \
frameworks/av/media/libstagefright/codecs/amrnb/enc/src \
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index 5406130..49614bd 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -24,6 +24,7 @@
#include <cstdio>
#include <gui/CpuConsumer.h>
+#include <gui/BufferItemConsumer.h>
#include <gui/Surface.h>
#include <camera3.h>
@@ -39,7 +40,7 @@
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
#define ANDROID_MEDIA_IMAGEREADER_CTX_JNI_ID "mNativeContext"
-#define ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID "mLockedBuffer"
+#define ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID "mNativeBuffer"
#define ANDROID_MEDIA_SURFACEIMAGE_TS_JNI_ID "mTimestamp"
// ----------------------------------------------------------------------------
@@ -62,7 +63,7 @@ static struct {
} gImageReaderClassInfo;
static struct {
- jfieldID mLockedBuffer;
+ jfieldID mNativeBuffer;
jfieldID mTimestamp;
} gSurfaceImageClassInfo;
@@ -73,7 +74,7 @@ static struct {
// ----------------------------------------------------------------------------
-class JNIImageReaderContext : public CpuConsumer::FrameAvailableListener
+class JNIImageReaderContext : public ConsumerBase::FrameAvailableListener
{
public:
JNIImageReaderContext(JNIEnv* env, jobject weakThiz, jclass clazz, int maxImages);
@@ -83,18 +84,28 @@ public:
virtual void onFrameAvailable(const BufferItem& item);
CpuConsumer::LockedBuffer* getLockedBuffer();
-
void returnLockedBuffer(CpuConsumer::LockedBuffer* buffer);
+ BufferItem* getOpaqueBuffer();
+ void returnOpaqueBuffer(BufferItem* buffer);
+
void setCpuConsumer(const sp<CpuConsumer>& consumer) { mConsumer = consumer; }
CpuConsumer* getCpuConsumer() { return mConsumer.get(); }
+ void setOpaqueConsumer(const sp<BufferItemConsumer>& consumer) { mOpaqueConsumer = consumer; }
+ BufferItemConsumer* getOpaqueConsumer() { return mOpaqueConsumer.get(); }
+ // This is the only opaque format exposed in the ImageFormat public API.
+ bool isOpaque() { return mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; }
+
void setProducer(const sp<IGraphicBufferProducer>& producer) { mProducer = producer; }
IGraphicBufferProducer* getProducer() { return mProducer.get(); }
void setBufferFormat(int format) { mFormat = format; }
int getBufferFormat() { return mFormat; }
+ void setBufferDataspace(android_dataspace dataSpace) { mDataSpace = dataSpace; }
+ android_dataspace getBufferDataspace() { return mDataSpace; }
+
void setBufferWidth(int width) { mWidth = width; }
int getBufferWidth() { return mWidth; }
@@ -106,11 +117,14 @@ private:
static void detachJNI();
List<CpuConsumer::LockedBuffer*> mBuffers;
+ List<BufferItem*> mOpaqueBuffers;
sp<CpuConsumer> mConsumer;
+ sp<BufferItemConsumer> mOpaqueConsumer;
sp<IGraphicBufferProducer> mProducer;
jobject mWeakThiz;
jclass mClazz;
int mFormat;
+ android_dataspace mDataSpace;
int mWidth;
int mHeight;
};
@@ -121,7 +135,9 @@ JNIImageReaderContext::JNIImageReaderContext(JNIEnv* env,
mClazz((jclass)env->NewGlobalRef(clazz)) {
for (int i = 0; i < maxImages; i++) {
CpuConsumer::LockedBuffer *buffer = new CpuConsumer::LockedBuffer;
+ BufferItem* opaqueBuffer = new BufferItem;
mBuffers.push_back(buffer);
+ mOpaqueBuffers.push_back(opaqueBuffer);
}
}
@@ -165,6 +181,21 @@ void JNIImageReaderContext::returnLockedBuffer(CpuConsumer::LockedBuffer* buffer
mBuffers.push_back(buffer);
}
+BufferItem* JNIImageReaderContext::getOpaqueBuffer() {
+ if (mOpaqueBuffers.empty()) {
+ return NULL;
+ }
+ // Return an opaque buffer pointer and remove it from the list
+ List<BufferItem*>::iterator it = mOpaqueBuffers.begin();
+ BufferItem* buffer = *it;
+ mOpaqueBuffers.erase(it);
+ return buffer;
+}
+
+void JNIImageReaderContext::returnOpaqueBuffer(BufferItem* buffer) {
+ mOpaqueBuffers.push_back(buffer);
+}
+
JNIImageReaderContext::~JNIImageReaderContext() {
bool needsDetach = false;
JNIEnv* env = getJNIEnv(&needsDetach);
@@ -183,8 +214,20 @@ JNIImageReaderContext::~JNIImageReaderContext() {
it != mBuffers.end(); it++) {
delete *it;
}
+
+ // Delete opaque buffers
+ for (List<BufferItem *>::iterator it = mOpaqueBuffers.begin();
+ it != mOpaqueBuffers.end(); it++) {
+ delete *it;
+ }
+
mBuffers.clear();
- mConsumer.clear();
+ if (mConsumer != 0) {
+ mConsumer.clear();
+ }
+ if (mOpaqueConsumer != 0) {
+ mOpaqueConsumer.clear();
+ }
}
void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
@@ -206,6 +249,11 @@ void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
extern "C" {
+static bool isFormatOpaque(int format) {
+ // Only treat IMPLEMENTATION_DEFINED as an opaque format for now.
+ return format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+}
+
static JNIImageReaderContext* ImageReader_getContext(JNIEnv* env, jobject thiz)
{
JNIImageReaderContext *ctx;
@@ -222,6 +270,13 @@ static CpuConsumer* ImageReader_getCpuConsumer(JNIEnv* env, jobject thiz)
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
+
+ if (ctx->isOpaque()) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque ImageReader doesn't support this method");
+ return NULL;
+ }
+
return ctx->getCpuConsumer();
}
@@ -233,6 +288,7 @@ static IGraphicBufferProducer* ImageReader_getProducer(JNIEnv* env, jobject thiz
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
+
return ctx->getProducer();
}
@@ -254,36 +310,19 @@ static void ImageReader_setNativeContext(JNIEnv* env,
static CpuConsumer::LockedBuffer* Image_getLockedBuffer(JNIEnv* env, jobject image)
{
return reinterpret_cast<CpuConsumer::LockedBuffer*>(
- env->GetLongField(image, gSurfaceImageClassInfo.mLockedBuffer));
+ env->GetLongField(image, gSurfaceImageClassInfo.mNativeBuffer));
}
static void Image_setBuffer(JNIEnv* env, jobject thiz,
const CpuConsumer::LockedBuffer* buffer)
{
- env->SetLongField(thiz, gSurfaceImageClassInfo.mLockedBuffer, reinterpret_cast<jlong>(buffer));
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer, reinterpret_cast<jlong>(buffer));
}
-// Some formats like JPEG defined with different values between android.graphics.ImageFormat and
-// graphics.h, need convert to the one defined in graphics.h here.
-static int Image_getPixelFormat(JNIEnv* env, int format)
+static void Image_setOpaqueBuffer(JNIEnv* env, jobject thiz,
+ const BufferItem* buffer)
{
- int jpegFormat;
- jfieldID fid;
-
- ALOGV("%s: format = 0x%x", __FUNCTION__, format);
-
- jclass imageFormatClazz = env->FindClass("android/graphics/ImageFormat");
- ALOG_ASSERT(imageFormatClazz != NULL);
-
- fid = env->GetStaticFieldID(imageFormatClazz, "JPEG", "I");
- jpegFormat = env->GetStaticIntField(imageFormatClazz, fid);
-
- // Translate the JPEG to BLOB for camera purpose.
- if (format == jpegFormat) {
- format = HAL_PIXEL_FORMAT_BLOB;
- }
-
- return format;
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer, reinterpret_cast<jlong>(buffer));
}
static uint32_t Image_getJpegSize(CpuConsumer::LockedBuffer* buffer, bool usingRGBAOverride)
@@ -430,7 +469,7 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
pData = buffer->data;
dataSize = Image_getJpegSize(buffer, usingRGBAOverride);
break;
- case HAL_PIXEL_FORMAT_RAW_SENSOR:
+ case HAL_PIXEL_FORMAT_RAW16:
// Single plane 16bpp bayer data.
bytesPerPixel = 2;
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
@@ -450,6 +489,19 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
pData = buffer->data;
dataSize = buffer->stride * buffer->height;
break;
+ case HAL_PIXEL_FORMAT_RAW12:
+ // Single plane 12bpp bayer data.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ LOG_ALWAYS_FATAL_IF(buffer->width % 4,
+ "Width is not multiple of 4 %d", buffer->width);
+ LOG_ALWAYS_FATAL_IF(buffer->height % 2,
+ "Height is not even %d", buffer->height);
+ LOG_ALWAYS_FATAL_IF(buffer->stride < (buffer->width * 12 / 8),
+ "stride (%d) should be at least %d",
+ buffer->stride, buffer->width * 12 / 8);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ break;
case HAL_PIXEL_FORMAT_RGBA_8888:
case HAL_PIXEL_FORMAT_RGBX_8888:
// Single plane, 32bpp.
@@ -483,7 +535,7 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
}
static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* buffer, int idx,
- int32_t readerFormat)
+ int32_t halReaderFormat)
{
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
ALOG_ASSERT((idx < IMAGE_READER_MAX_NUM_PLANES) && (idx >= 0), "Index is out of range:%d", idx);
@@ -493,7 +545,7 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
int32_t fmt = buffer->flexFormat;
- fmt = applyFormatOverrides(fmt, readerFormat);
+ fmt = applyFormatOverrides(fmt, halReaderFormat);
switch (fmt) {
case HAL_PIXEL_FORMAT_YCbCr_420_888:
@@ -511,13 +563,15 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
break;
case HAL_PIXEL_FORMAT_BLOB:
case HAL_PIXEL_FORMAT_RAW10:
- // Blob is used for JPEG data, RAW10 is used for 10-bit raw data, they are
- // single plane, row and pixel strides are 0.
+ case HAL_PIXEL_FORMAT_RAW12:
+ // Blob is used for JPEG data, RAW10 and RAW12 are used for 10-bit and 12-bit raw data,
+ // those are single plane data with pixel stride 0 since they don't really have a
+ // well defined pixel stride
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
pixelStride = 0;
break;
case HAL_PIXEL_FORMAT_Y16:
- case HAL_PIXEL_FORMAT_RAW_SENSOR:
+ case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_RGB_565:
// Single plane 16bpp data.
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
@@ -543,7 +597,7 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
}
static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buffer, int idx,
- int32_t readerFormat)
+ int32_t halReaderFormat)
{
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
ALOG_ASSERT((idx < IMAGE_READER_MAX_NUM_PLANES) && (idx >= 0));
@@ -553,7 +607,7 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
int32_t fmt = buffer->flexFormat;
- fmt = applyFormatOverrides(fmt, readerFormat);
+ fmt = applyFormatOverrides(fmt, halReaderFormat);
switch (fmt) {
case HAL_PIXEL_FORMAT_YCbCr_420_888:
@@ -568,12 +622,14 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
rowStride = (idx == 0) ? buffer->stride : ALIGN(buffer->stride / 2, 16);
break;
case HAL_PIXEL_FORMAT_BLOB:
- // Blob is used for JPEG data, RAW10 is used for 10-bit raw data, they are
- // single plane, row and pixel strides are 0.
+ // Blob is used for JPEG data. It is single plane and has 0 row stride and
+ // 0 pixel stride
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
rowStride = 0;
break;
case HAL_PIXEL_FORMAT_RAW10:
+ case HAL_PIXEL_FORMAT_RAW12:
+ // RAW10 and RAW12 are used for 10-bit and 12-bit raw data, they are single plane
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
rowStride = buffer->stride;
break;
@@ -584,7 +640,7 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
rowStride = buffer->stride;
break;
case HAL_PIXEL_FORMAT_Y16:
- case HAL_PIXEL_FORMAT_RAW_SENSOR:
+ case HAL_PIXEL_FORMAT_RAW16:
// In native side, strides are specified in pixels, not in bytes.
// Single plane 16bpp bayer data. even width/height,
// row stride multiple of 16 pixels (32 bytes)
@@ -635,6 +691,52 @@ static int Image_getBufferHeight(CpuConsumer::LockedBuffer* buffer) {
return buffer->height;
}
+// --------------------------Methods for opaque Image and ImageReader----------
+
+static BufferItemConsumer* ImageReader_getOpaqueConsumer(JNIEnv* env, jobject thiz)
+{
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* const ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
+ return NULL;
+ }
+
+ if (!ctx->isOpaque()) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Non-opaque ImageReader doesn't support this method");
+ }
+
+ return ctx->getOpaqueConsumer();
+}
+
+static BufferItem* Image_getOpaqueBuffer(JNIEnv* env, jobject image)
+{
+ return reinterpret_cast<BufferItem*>(
+ env->GetLongField(image, gSurfaceImageClassInfo.mNativeBuffer));
+}
+
+static int Image_getOpaqueBufferWidth(BufferItem* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->mCrop.isEmpty()) {
+ return buffer->mCrop.getWidth();
+ }
+ return buffer->mGraphicBuffer->getWidth();
+}
+
+static int Image_getOpaqueBufferHeight(BufferItem* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->mCrop.isEmpty()) {
+ return buffer->mCrop.getHeight();
+ }
+
+ return buffer->mGraphicBuffer->getHeight();
+}
+
+
+
// ----------------------------------------------------------------------------
static void ImageReader_classInit(JNIEnv* env, jclass clazz)
@@ -644,9 +746,9 @@ static void ImageReader_classInit(JNIEnv* env, jclass clazz)
jclass imageClazz = env->FindClass("android/media/ImageReader$SurfaceImage");
LOG_ALWAYS_FATAL_IF(imageClazz == NULL,
"can't find android/graphics/ImageReader$SurfaceImage");
- gSurfaceImageClassInfo.mLockedBuffer = env->GetFieldID(
+ gSurfaceImageClassInfo.mNativeBuffer = env->GetFieldID(
imageClazz, ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID, "J");
- LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mLockedBuffer == NULL,
+ LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeBuffer == NULL,
"can't find android/graphics/ImageReader.%s",
ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID);
@@ -682,22 +784,16 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
{
status_t res;
int nativeFormat;
+ android_dataspace nativeDataspace;
ALOGV("%s: width:%d, height: %d, format: 0x%x, maxImages:%d",
__FUNCTION__, width, height, format, maxImages);
- nativeFormat = Image_getPixelFormat(env, format);
-
- sp<IGraphicBufferProducer> gbProducer;
- sp<IGraphicBufferConsumer> gbConsumer;
- BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
- sp<CpuConsumer> consumer = new CpuConsumer(gbConsumer, maxImages,
- /*controlledByApp*/true);
- // TODO: throw dvm exOutOfMemoryError?
- if (consumer == NULL) {
- jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
- return;
- }
+ PublicFormat publicFormat = static_cast<PublicFormat>(format);
+ nativeFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ publicFormat);
+ nativeDataspace = android_view_Surface_mapPublicFormatToHalDataspace(
+ publicFormat);
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
@@ -705,25 +801,80 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
return;
}
sp<JNIImageReaderContext> ctx(new JNIImageReaderContext(env, weakThiz, clazz, maxImages));
- ctx->setCpuConsumer(consumer);
+
+ sp<IGraphicBufferProducer> gbProducer;
+ sp<IGraphicBufferConsumer> gbConsumer;
+ BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+ sp<ConsumerBase> consumer;
+ sp<CpuConsumer> cpuConsumer;
+ sp<BufferItemConsumer> opaqueConsumer;
+ if (isFormatOpaque(nativeFormat)) {
+ // Use the SW_READ_NEVER usage to tell producer that this format is not for preview or video
+ // encoding. The only possibility will be ZSL output.
+ opaqueConsumer =
+ new BufferItemConsumer(gbConsumer, GRALLOC_USAGE_SW_READ_NEVER, maxImages,
+ /*controlledByApp*/true);
+ if (opaqueConsumer == NULL) {
+ jniThrowRuntimeException(env, "Failed to allocate native opaque consumer");
+ return;
+ }
+ ctx->setOpaqueConsumer(opaqueConsumer);
+ consumer = opaqueConsumer;
+ } else {
+ cpuConsumer = new CpuConsumer(gbConsumer, maxImages, /*controlledByApp*/true);
+ // TODO: throw dvm exOutOfMemoryError?
+ if (cpuConsumer == NULL) {
+ jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
+ return;
+ }
+ ctx->setCpuConsumer(cpuConsumer);
+ consumer = cpuConsumer;
+ }
+
ctx->setProducer(gbProducer);
consumer->setFrameAvailableListener(ctx);
ImageReader_setNativeContext(env, thiz, ctx);
ctx->setBufferFormat(nativeFormat);
+ ctx->setBufferDataspace(nativeDataspace);
ctx->setBufferWidth(width);
ctx->setBufferHeight(height);
- // Set the width/height/format to the CpuConsumer
- res = consumer->setDefaultBufferSize(width, height);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer size");
- return;
- }
- res = consumer->setDefaultBufferFormat(nativeFormat);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer format");
+ // Set the width/height/format/dataspace to the CpuConsumer
+ // TODO: below code can be simplified once b/19977701 is fixed.
+ if (isFormatOpaque(nativeFormat)) {
+ res = opaqueConsumer->setDefaultBufferSize(width, height);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer size");
+ return;
+ }
+ res = opaqueConsumer->setDefaultBufferFormat(nativeFormat);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer format");
+ }
+ res = opaqueConsumer->setDefaultBufferDataSpace(nativeDataspace);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer dataSpace");
+ }
+ } else {
+ res = cpuConsumer->setDefaultBufferSize(width, height);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer size");
+ return;
+ }
+ res = cpuConsumer->setDefaultBufferFormat(nativeFormat);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer format");
+ }
+ res = cpuConsumer->setDefaultBufferDataSpace(nativeDataspace);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer dataSpace");
+ }
}
}
@@ -737,7 +888,13 @@ static void ImageReader_close(JNIEnv* env, jobject thiz)
return;
}
- CpuConsumer* consumer = ImageReader_getCpuConsumer(env, thiz);
+ ConsumerBase* consumer = NULL;
+ if (ctx->isOpaque()) {
+ consumer = ImageReader_getOpaqueConsumer(env, thiz);
+ } else {
+ consumer = ImageReader_getCpuConsumer(env, thiz);
+ }
+
if (consumer != NULL) {
consumer->abandon();
consumer->setFrameAvailableListener(NULL);
@@ -754,27 +911,66 @@ static void ImageReader_imageRelease(JNIEnv* env, jobject thiz, jobject image)
return;
}
- CpuConsumer* consumer = ctx->getCpuConsumer();
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, image);
- if (!buffer) {
- ALOGW("Image already released!!!");
- return;
+ if (ctx->isOpaque()) {
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, image);
+ opaqueConsumer->releaseBuffer(*opaqueBuffer); // Not using fence for now.
+ Image_setOpaqueBuffer(env, image, NULL);
+ ctx->returnOpaqueBuffer(opaqueBuffer);
+ ALOGV("%s: Opaque Image has been released", __FUNCTION__);
+ } else {
+ CpuConsumer* consumer = ctx->getCpuConsumer();
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, image);
+ if (!buffer) {
+ ALOGW("Image already released!!!");
+ return;
+ }
+ consumer->unlockBuffer(*buffer);
+ Image_setBuffer(env, image, NULL);
+ ctx->returnLockedBuffer(buffer);
+ ALOGV("%s: Image (format: 0x%x) has been released", __FUNCTION__, ctx->getBufferFormat());
}
- consumer->unlockBuffer(*buffer);
- Image_setBuffer(env, image, NULL);
- ctx->returnLockedBuffer(buffer);
}
-static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
- jobject image)
-{
+static jint ImageReader_opaqueImageSetup(JNIEnv* env, JNIImageReaderContext* ctx, jobject image) {
ALOGV("%s:", __FUNCTION__);
- JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
- if (ctx == NULL) {
+ if (ctx == NULL || !ctx->isOpaque()) {
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return -1;
}
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* buffer = ctx->getOpaqueBuffer();
+ if (buffer == NULL) {
+ ALOGW("Unable to acquire a buffer item, very likely client tried to acquire more than"
+ " maxImages buffers");
+ return ACQUIRE_MAX_IMAGES;
+ }
+
+ status_t res = opaqueConsumer->acquireBuffer(buffer, 0);
+ if (res != OK) {
+ ctx->returnOpaqueBuffer(buffer);
+ if (res == INVALID_OPERATION) {
+ // Max number of images were already acquired.
+ ALOGE("%s: Max number of buffers allowed are already acquired : %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return ACQUIRE_MAX_IMAGES;
+ } else {
+ ALOGE("%s: Acquire image failed with error: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return ACQUIRE_NO_BUFFERS;
+ }
+ }
+
+ // Set SurfaceImage instance member variables
+ Image_setOpaqueBuffer(env, image, buffer);
+ env->SetLongField(image, gSurfaceImageClassInfo.mTimestamp,
+ static_cast<jlong>(buffer->mTimestamp));
+
+ return ACQUIRE_SUCCESS;
+}
+
+static jint ImageReader_lockedImageSetup(JNIEnv* env, JNIImageReaderContext* ctx, jobject image) {
CpuConsumer* consumer = ctx->getCpuConsumer();
CpuConsumer::LockedBuffer* buffer = ctx->getLockedBuffer();
if (buffer == NULL) {
@@ -867,6 +1063,57 @@ static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
return ACQUIRE_SUCCESS;
}
+static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz, jobject image) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
+ return -1;
+ }
+
+ if (ctx->isOpaque()) {
+ return ImageReader_opaqueImageSetup(env, ctx, image);
+ } else {
+ return ImageReader_lockedImageSetup(env, ctx, image);
+ }
+}
+
+static jint ImageReader_detachImage(JNIEnv* env, jobject thiz, jobject image) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", "ImageReader was already closed");
+ return -1;
+ }
+
+ status_t res = OK;
+ if (!ctx->isOpaque()) {
+ // TODO: Non-Opaque format detach is not implemented yet.
+ jniThrowRuntimeException(env,
+ "nativeDetachImage is not implemented yet for non-opaque format !!!");
+ return -1;
+ }
+
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, image);
+ if (!opaqueBuffer) {
+ ALOGE(
+ "Opaque Image already released and can not be detached from ImageReader!!!");
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque Image detach from ImageReader failed: buffer was already released");
+ return -1;
+ }
+
+ res = opaqueConsumer->detachBuffer(opaqueBuffer->mSlot);
+ if (res != OK) {
+ ALOGE("Opaque Image detach failed: %s (%d)!!!", strerror(-res), res);
+ jniThrowRuntimeException(env,
+ "nativeDetachImage failed for opaque image!!!");
+ return res;
+ }
+ return OK;
+}
+
static jobject ImageReader_getSurface(JNIEnv* env, jobject thiz)
{
ALOGV("%s: ", __FUNCTION__);
@@ -884,7 +1131,16 @@ static jobject ImageReader_getSurface(JNIEnv* env, jobject thiz)
static jobject Image_createSurfacePlane(JNIEnv* env, jobject thiz, int idx, int readerFormat)
{
int rowStride, pixelStride;
+ PublicFormat publicReaderFormat = static_cast<PublicFormat>(readerFormat);
+ int halReaderFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ publicReaderFormat);
+
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
+ if (isFormatOpaque(halReaderFormat)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque images from Opaque ImageReader do not have any planes");
+ return NULL;
+ }
CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
@@ -893,10 +1149,8 @@ static jobject Image_createSurfacePlane(JNIEnv* env, jobject thiz, int idx, int
jniThrowException(env, "java/lang/IllegalStateException", "Image was released");
}
- readerFormat = Image_getPixelFormat(env, readerFormat);
-
- rowStride = Image_imageGetRowStride(env, buffer, idx, readerFormat);
- pixelStride = Image_imageGetPixelStride(env, buffer, idx, readerFormat);
+ rowStride = Image_imageGetRowStride(env, buffer, idx, halReaderFormat);
+ pixelStride = Image_imageGetPixelStride(env, buffer, idx, halReaderFormat);
jobject surfPlaneObj = env->NewObject(gSurfacePlaneClassInfo.clazz,
gSurfacePlaneClassInfo.ctor, thiz, idx, rowStride, pixelStride);
@@ -909,19 +1163,26 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
uint8_t *base = NULL;
uint32_t size = 0;
jobject byteBuffer;
+ PublicFormat readerPublicFormat = static_cast<PublicFormat>(readerFormat);
+ int readerHalFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ readerPublicFormat);
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
+ if (isFormatOpaque(readerHalFormat)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque images from Opaque ImageReader do not have any plane");
+ return NULL;
+ }
+
CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
if (buffer == NULL) {
jniThrowException(env, "java/lang/IllegalStateException", "Image was released");
}
- readerFormat = Image_getPixelFormat(env, readerFormat);
-
// Create byteBuffer from native buffer
- Image_getLockedBufferInfo(env, buffer, idx, &base, &size, readerFormat);
+ Image_getLockedBufferInfo(env, buffer, idx, &base, &size, readerHalFormat);
if (size > static_cast<uint32_t>(INT32_MAX)) {
// Byte buffer have 'int capacity', so check the range
@@ -939,18 +1200,40 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
return byteBuffer;
}
-static jint Image_getWidth(JNIEnv* env, jobject thiz)
+static jint Image_getWidth(JNIEnv* env, jobject thiz, jint format)
{
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
- return Image_getBufferWidth(buffer);
+ if (isFormatOpaque(format)) {
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, thiz);
+ return Image_getOpaqueBufferWidth(opaqueBuffer);
+ } else {
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferWidth(buffer);
+ }
}
-static jint Image_getHeight(JNIEnv* env, jobject thiz)
+static jint Image_getHeight(JNIEnv* env, jobject thiz, jint format)
{
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
- return Image_getBufferHeight(buffer);
+ if (isFormatOpaque(format)) {
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, thiz);
+ return Image_getOpaqueBufferHeight(opaqueBuffer);
+ } else {
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferHeight(buffer);
+ }
}
+// Returns the public (android.graphics.ImageFormat) format of this image.
+// For an opaque reader the format is PRIVATE by definition; otherwise it is
+// derived from the locked buffer's HAL flexFormat + dataSpace.
+static jint Image_getFormat(JNIEnv* env, jobject thiz, jint readerFormat)
+{
+    if (isFormatOpaque(readerFormat)) {
+        // Assuming opaque reader produce opaque images.
+        return static_cast<jint>(PublicFormat::PRIVATE);
+    } else {
+        CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+        // NOTE(review): buffer is dereferenced without a NULL check, unlike the
+        // width/height paths — confirm callers guarantee a locked buffer here.
+        PublicFormat publicFmt = android_view_Surface_mapHalFormatDataspaceToPublicFormat(
+                buffer->flexFormat, buffer->dataSpace);
+        return static_cast<jint>(publicFmt);
+    }
+}
} // extern "C"
@@ -963,14 +1246,16 @@ static JNINativeMethod gImageReaderMethods[] = {
{"nativeReleaseImage", "(Landroid/media/Image;)V", (void*)ImageReader_imageRelease },
{"nativeImageSetup", "(Landroid/media/Image;)I", (void*)ImageReader_imageSetup },
{"nativeGetSurface", "()Landroid/view/Surface;", (void*)ImageReader_getSurface },
+ {"nativeDetachImage", "(Landroid/media/Image;)I", (void*)ImageReader_detachImage },
};
static JNINativeMethod gImageMethods[] = {
{"nativeImageGetBuffer", "(II)Ljava/nio/ByteBuffer;", (void*)Image_getByteBuffer },
{"nativeCreatePlane", "(II)Landroid/media/ImageReader$SurfaceImage$SurfacePlane;",
(void*)Image_createSurfacePlane },
- {"nativeGetWidth", "()I", (void*)Image_getWidth },
- {"nativeGetHeight", "()I", (void*)Image_getHeight },
+ {"nativeGetWidth", "(I)I", (void*)Image_getWidth },
+ {"nativeGetHeight", "(I)I", (void*)Image_getHeight },
+ {"nativeGetFormat", "(I)I", (void*)Image_getFormat },
};
int register_android_media_ImageReader(JNIEnv *env) {
diff --git a/media/jni/android_media_ImageWriter.cpp b/media/jni/android_media_ImageWriter.cpp
new file mode 100644
index 0000000..294cd84
--- /dev/null
+++ b/media/jni/android_media_ImageWriter.cpp
@@ -0,0 +1,1083 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ImageWriter_JNI"
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
+#include <gui/CpuConsumer.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_runtime/android_view_Surface.h>
+#include <camera3.h>
+
+#include <jni.h>
+#include <JNIHelp.h>
+
+#include <stdint.h>
+#include <inttypes.h>
+
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
+
+#define IMAGE_BUFFER_JNI_ID "mNativeBuffer"
+
+// ----------------------------------------------------------------------------
+
+using namespace android;
+
+enum {
+ IMAGE_WRITER_MAX_NUM_PLANES = 3,
+};
+
+static struct {
+ jmethodID postEventFromNative;
+ jfieldID mWriterFormat;
+} gImageWriterClassInfo;
+
+static struct {
+ jfieldID mNativeBuffer;
+ jfieldID mNativeFenceFd;
+ jfieldID mPlanes;
+} gSurfaceImageClassInfo;
+
+static struct {
+ jclass clazz;
+ jmethodID ctor;
+} gSurfacePlaneClassInfo;
+
+typedef CpuConsumer::LockedBuffer LockedImage;
+
+// ----------------------------------------------------------------------------
+
+// Native peer of android.media.ImageWriter. Holds the producer Surface and the
+// cached buffer geometry/format, and forwards producer-side buffer-released
+// events back to the Java object via postEventFromNative.
+class JNIImageWriterContext : public BnProducerListener {
+public:
+    JNIImageWriterContext(JNIEnv* env, jobject weakThiz, jclass clazz);
+
+    virtual ~JNIImageWriterContext();
+
+    // Implementation of IProducerListener, used to notify the ImageWriter that the consumer
+    // has returned a buffer and it is ready for ImageWriter to dequeue.
+    virtual void onBufferReleased();
+
+    void setProducer(const sp<Surface>& producer) { mProducer = producer; }
+    Surface* getProducer() { return mProducer.get(); }
+
+    void setBufferFormat(int format) { mFormat = format; }
+    int getBufferFormat() { return mFormat; }
+
+    void setBufferWidth(int width) { mWidth = width; }
+    int getBufferWidth() { return mWidth; }
+
+    void setBufferHeight(int height) { mHeight = height; }
+    int getBufferHeight() { return mHeight; }
+
+private:
+    // Helpers to obtain a JNIEnv on arbitrary binder threads; see getJNIEnv().
+    static JNIEnv* getJNIEnv(bool* needsDetach);
+    static void detachJNI();
+
+    sp<Surface> mProducer;   // producer side of the consumer-provided Surface
+    jobject mWeakThiz;       // global ref to the Java-side weak reference
+    jclass mClazz;           // global ref to the ImageWriter class
+    int mFormat;
+    int mWidth;
+    int mHeight;
+};
+
+// Pins global references to the Java peer and its class so callbacks arriving
+// on binder threads can safely post events; released in the destructor.
+JNIImageWriterContext::JNIImageWriterContext(JNIEnv* env, jobject weakThiz, jclass clazz) :
+    mWeakThiz(env->NewGlobalRef(weakThiz)),
+    mClazz((jclass)env->NewGlobalRef(clazz)),
+    mFormat(0),
+    mWidth(-1),
+    mHeight(-1) {
+}
+
+// May run on a non-Java thread, so a JNIEnv must be (re)attached before the
+// global refs can be deleted; if attachment fails the refs are knowingly leaked.
+JNIImageWriterContext::~JNIImageWriterContext() {
+    ALOGV("%s", __FUNCTION__);
+    bool needsDetach = false;
+    JNIEnv* env = getJNIEnv(&needsDetach);
+    if (env != NULL) {
+        env->DeleteGlobalRef(mWeakThiz);
+        env->DeleteGlobalRef(mClazz);
+    } else {
+        ALOGW("leaking JNI object references");
+    }
+    if (needsDetach) {
+        detachJNI();
+    }
+
+    mProducer.clear();
+}
+
+// Returns a JNIEnv for the current thread, attaching the thread to the VM if
+// necessary. *needsDetach is set to true iff this call performed the attach,
+// in which case the caller must balance it with detachJNI().
+JNIEnv* JNIImageWriterContext::getJNIEnv(bool* needsDetach) {
+    ALOGV("%s", __FUNCTION__);
+    LOG_ALWAYS_FATAL_IF(needsDetach == NULL, "needsDetach is null!!!");
+    *needsDetach = false;
+    JNIEnv* env = AndroidRuntime::getJNIEnv();
+    if (env == NULL) {
+        // Thread not attached yet (e.g. a binder callback thread); attach it.
+        JavaVMAttachArgs args = {JNI_VERSION_1_4, NULL, NULL};
+        JavaVM* vm = AndroidRuntime::getJavaVM();
+        int result = vm->AttachCurrentThread(&env, (void*) &args);
+        if (result != JNI_OK) {
+            ALOGE("thread attach failed: %#x", result);
+            return NULL;
+        }
+        *needsDetach = true;
+    }
+    return env;
+}
+
+// Counterpart of getJNIEnv(): detaches the current thread from the VM after a
+// temporary attach. Failure is logged but not propagated.
+void JNIImageWriterContext::detachJNI() {
+    ALOGV("%s", __FUNCTION__);
+    JavaVM* vm = AndroidRuntime::getJavaVM();
+    int result = vm->DetachCurrentThread();
+    if (result != JNI_OK) {
+        ALOGE("thread detach failed: %#x", result);
+    }
+}
+
+// IProducerListener callback: the consumer has returned a buffer. For the
+// opaque format the freed buffer is detached immediately (see b/19977520),
+// then the Java side is notified so it can unblock a pending dequeue.
+void JNIImageWriterContext::onBufferReleased() {
+    ALOGV("%s: buffer released", __FUNCTION__);
+    bool needsDetach = false;
+    JNIEnv* env = getJNIEnv(&needsDetach);
+    if (env != NULL) {
+        // Detach the buffer every time when a buffer consumption is done,
+        // need let this callback give a BufferItem, then only detach if it was attached to this
+        // Writer. Do the detach unconditionally for opaque format now. see b/19977520
+        if (mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            sp<Fence> fence;
+            sp<GraphicBuffer> buffer;
+            ALOGV("%s: One buffer is detached", __FUNCTION__);
+            // NOTE(review): the status of detachNextBuffer is ignored — confirm a
+            // failed detach here is benign.
+            mProducer->detachNextBuffer(&buffer, &fence);
+        }
+
+        env->CallStaticVoidMethod(mClazz, gImageWriterClassInfo.postEventFromNative, mWeakThiz);
+    } else {
+        ALOGW("onBufferReleased event will not posted");
+    }
+
+    if (needsDetach) {
+        detachJNI();
+    }
+}
+
+// ----------------------------------------------------------------------------
+
+extern "C" {
+
+// -------------------------------Private method declarations--------------
+
+static bool isPossiblyYUV(PixelFormat format);
+static void Image_setNativeContext(JNIEnv* env, jobject thiz,
+ sp<GraphicBuffer> buffer, int fenceFd);
+static void Image_getNativeContext(JNIEnv* env, jobject thiz,
+ GraphicBuffer** buffer, int* fenceFd);
+static void Image_unlockIfLocked(JNIEnv* env, jobject thiz);
+static bool isFormatOpaque(int format);
+
+// --------------------------ImageWriter methods---------------------------------------
+
+// One-time class initialization: caches field/method IDs of ImageWriter,
+// WriterSurfaceImage and SurfacePlane so the hot JNI paths avoid repeated
+// lookups. Any missing member is a build mismatch and aborts the process.
+static void ImageWriter_classInit(JNIEnv* env, jclass clazz) {
+    ALOGV("%s:", __FUNCTION__);
+    jclass imageClazz = env->FindClass("android/media/ImageWriter$WriterSurfaceImage");
+    LOG_ALWAYS_FATAL_IF(imageClazz == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage");
+    gSurfaceImageClassInfo.mNativeBuffer = env->GetFieldID(
+            imageClazz, IMAGE_BUFFER_JNI_ID, "J");
+    LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeBuffer == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage.%s", IMAGE_BUFFER_JNI_ID);
+
+    gSurfaceImageClassInfo.mNativeFenceFd = env->GetFieldID(
+            imageClazz, "mNativeFenceFd", "I");
+    LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeFenceFd == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage.mNativeFenceFd");
+
+    gSurfaceImageClassInfo.mPlanes = env->GetFieldID(
+            imageClazz, "mPlanes", "[Landroid/media/ImageWriter$WriterSurfaceImage$SurfacePlane;");
+    LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mPlanes == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage.mPlanes");
+
+    gImageWriterClassInfo.postEventFromNative = env->GetStaticMethodID(
+            clazz, "postEventFromNative", "(Ljava/lang/Object;)V");
+    LOG_ALWAYS_FATAL_IF(gImageWriterClassInfo.postEventFromNative == NULL,
+                        "can't find android/media/ImageWriter.postEventFromNative");
+
+    gImageWriterClassInfo.mWriterFormat = env->GetFieldID(
+            clazz, "mWriterFormat", "I");
+    LOG_ALWAYS_FATAL_IF(gImageWriterClassInfo.mWriterFormat == NULL,
+                        "can't find android/media/ImageWriter.mWriterFormat");
+
+    jclass planeClazz = env->FindClass("android/media/ImageWriter$WriterSurfaceImage$SurfacePlane");
+    LOG_ALWAYS_FATAL_IF(planeClazz == NULL, "Can not find SurfacePlane class");
+    // FindClass only gives a local reference of jclass object.
+    gSurfacePlaneClassInfo.clazz = (jclass) env->NewGlobalRef(planeClazz);
+    gSurfacePlaneClassInfo.ctor = env->GetMethodID(gSurfacePlaneClassInfo.clazz, "<init>",
+            "(Landroid/media/ImageWriter$WriterSurfaceImage;IILjava/nio/ByteBuffer;)V");
+    LOG_ALWAYS_FATAL_IF(gSurfacePlaneClassInfo.ctor == NULL,
+            "Can not find SurfacePlane constructor");
+}
+
+/**
+ * Creates the native ImageWriter context for the given destination Surface:
+ * connects as a CAMERA-API producer (with this context as producer listener),
+ * caches the Surface's width/height/format, configures SW-write usage for
+ * non-opaque formats, and sizes the buffer queue for maxImages client-held
+ * images plus the queue's min-undequeued requirement.
+ *
+ * Returns the context pointer as a jlong (0 on failure, with a Java exception
+ * pending). The returned context carries an extra strong ref owned by the
+ * Java object; it is released in ImageWriter_close().
+ */
+static jlong ImageWriter_init(JNIEnv* env, jobject thiz, jobject weakThiz, jobject jsurface,
+        jint maxImages) {
+    status_t res;
+
+    ALOGV("%s: maxImages:%d", __FUNCTION__, maxImages);
+
+    sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+    if (surface == NULL) {
+        jniThrowException(env,
+                "java/lang/IllegalArgumentException",
+                "The surface has been released");
+        return 0;
+    }
+    sp<IGraphicBufferProducer> bufferProducer = surface->getIGraphicBufferProducer();
+
+    jclass clazz = env->GetObjectClass(thiz);
+    if (clazz == NULL) {
+        jniThrowRuntimeException(env, "Can't find android/graphics/ImageWriter");
+        return 0;
+    }
+    sp<JNIImageWriterContext> ctx(new JNIImageWriterContext(env, weakThiz, clazz));
+
+    sp<Surface> producer = new Surface(bufferProducer, /*controlledByApp*/false);
+    ctx->setProducer(producer);
+    /**
+     * NATIVE_WINDOW_API_CPU isn't a good choice here, as it makes the bufferQueue not connectable
+     * after disconnect. MEDIA or CAMERA are treated the same internally. The producer listener
+     * will be cleared after disconnect call.
+     */
+    producer->connect(/*api*/NATIVE_WINDOW_API_CAMERA, /*listener*/ctx);
+    jlong nativeCtx = reinterpret_cast<jlong>(ctx.get());
+
+    // Get the dimension and format of the producer.
+    sp<ANativeWindow> anw = producer;
+    int32_t width, height, format;
+    if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
+        ALOGE("%s: Query Surface width failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Failed to query Surface width");
+        return 0;
+    }
+    ctx->setBufferWidth(width);
+
+    if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+        ALOGE("%s: Query Surface height failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Failed to query Surface height");
+        return 0;
+    }
+    ctx->setBufferHeight(height);
+
+    if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        ALOGE("%s: Query Surface format failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Failed to query Surface format");
+        return 0;
+    }
+    ctx->setBufferFormat(format);
+    // static_cast, not reinterpret_cast: this is a plain integral value
+    // conversion (int32_t -> jint); reinterpret_cast is the wrong tool here.
+    env->SetIntField(thiz, gImageWriterClassInfo.mWriterFormat, static_cast<jint>(format));
+
+    // Opaque formats have no CPU-accessible planes, so SW write usage only
+    // applies to non-opaque formats.
+    if (!isFormatOpaque(format)) {
+        res = native_window_set_usage(anw.get(), GRALLOC_USAGE_SW_WRITE_OFTEN);
+        if (res != OK) {
+            ALOGE("%s: Configure usage %08x for format %08x failed: %s (%d)",
+                    __FUNCTION__, GRALLOC_USAGE_SW_WRITE_OFTEN, format, strerror(-res), res);
+            jniThrowRuntimeException(env, "Failed to SW_WRITE_OFTEN configure usage");
+            return 0;
+        }
+    }
+
+    int minUndequeuedBufferCount = 0;
+    res = anw->query(anw.get(),
+                NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufferCount);
+    if (res != OK) {
+        ALOGE("%s: Query producer undequeued buffer count failed: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Query producer undequeued buffer count failed");
+        return 0;
+    }
+
+    size_t totalBufferCount = maxImages + minUndequeuedBufferCount;
+    res = native_window_set_buffer_count(anw.get(), totalBufferCount);
+    if (res != OK) {
+        ALOGE("%s: Set buffer count failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Set buffer count failed");
+        return 0;
+    }
+
+    // Hand an extra strong ref to the Java side; balanced by decStrong in
+    // ImageWriter_close().
+    if (ctx != 0) {
+        ctx->incStrong((void*)ImageWriter_init);
+    }
+    return nativeCtx;
+}
+
+// Dequeues the next producer buffer and binds it (plus its acquire fence fd)
+// to the given Java Image. The buffer is not locked here; locking is deferred
+// until the first getPlanes() call.
+static void ImageWriter_dequeueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    sp<ANativeWindow> anw = ctx->getProducer();
+    android_native_buffer_t *anb = NULL;
+    int fenceFd = -1;
+    status_t res = anw->dequeueBuffer(anw.get(), &anb, &fenceFd);
+    if (res != OK) {
+        // TODO: handle different error cases here.
+        ALOGE("%s: Dequeue buffer failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "dequeue buffer failed");
+        return;
+    }
+    // New GraphicBuffer object doesn't own the handle, thus the native buffer
+    // won't be freed when this object is destroyed.
+    sp<GraphicBuffer> buffer(new GraphicBuffer(anb, /*keepOwnership*/false));
+
+    // Note that:
+    // 1. No need to lock buffer now, will only lock it when the first getPlanes() is called.
+    // 2. Fence will be saved to mNativeFenceFd, and will consumed by lock/queue/cancel buffer
+    //    later.
+    // 3. need use lockAsync here, as it will handle the dequeued fence for us automatically.
+
+    // Finally, set the native info into image object.
+    Image_setNativeContext(env, image, buffer, fenceFd);
+}
+
+// Tears down the writer: disconnects the producer from the buffer queue
+// (which also clears the producer listener) and drops the strong ref taken
+// in ImageWriter_init(). A DEAD_OBJECT from disconnect is expected when the
+// consumer process already died and is not treated as an error.
+static void ImageWriter_close(JNIEnv* env, jobject thiz, jlong nativeCtx) {
+    ALOGV("%s:", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    ANativeWindow* producer = ctx->getProducer();
+    if (producer != NULL) {
+        /**
+         * NATIVE_WINDOW_API_CPU isn't a good choice here, as it makes the bufferQueue not
+         * connectable after disconnect. MEDIA or CAMERA are treated the same internally.
+         * The producer listener will be cleared after disconnect call.
+         */
+        status_t res = native_window_api_disconnect(producer, /*api*/NATIVE_WINDOW_API_CAMERA);
+        /**
+         * This is not an error. if client calling process dies, the window will
+         * also die and all calls to it will return DEAD_OBJECT, thus it's already
+         * "disconnected"
+         */
+        if (res == DEAD_OBJECT) {
+            ALOGW("%s: While disconnecting ImageWriter from native window, the"
+                    " native window died already", __FUNCTION__);
+        } else if (res != OK) {
+            ALOGE("%s: native window disconnect failed: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            jniThrowRuntimeException(env, "Native window disconnect failed");
+            return;
+        }
+    }
+
+    // Balances the incStrong in ImageWriter_init(); may destroy the context.
+    ctx->decStrong((void*)ImageWriter_init);
+}
+
+// Returns a dequeued-but-unqueued buffer to the queue: unlocks it if the app
+// had locked it via getPlanes(), cancels it (handing the pending fence fd back
+// to the queue), and clears the Image's native context.
+static void ImageWriter_cancelImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    sp<ANativeWindow> anw = ctx->getProducer();
+
+    GraphicBuffer *buffer = NULL;
+    int fenceFd = -1;
+    Image_getNativeContext(env, image, &buffer, &fenceFd);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    // Unlock the image if it was locked
+    Image_unlockIfLocked(env, image);
+
+    // cancelBuffer takes ownership of fenceFd.
+    anw->cancelBuffer(anw.get(), buffer, fenceFd);
+
+    // End of this image's lifecycle: detach the buffer/fence from the Java object.
+    Image_setNativeContext(env, image, NULL, -1);
+}
+
+// Queues a filled image to the consumer: unlocks it if needed, stamps the
+// buffer with the given timestamp and crop rect, queues it (the pending fence
+// fd is consumed by queueBuffer), and clears the Image's native context.
+static void ImageWriter_queueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image,
+        jlong timestampNs, jint left, jint top, jint right, jint bottom) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    status_t res = OK;
+    sp<ANativeWindow> anw = ctx->getProducer();
+
+    GraphicBuffer *buffer = NULL;
+    int fenceFd = -1;
+    Image_getNativeContext(env, image, &buffer, &fenceFd);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    // Unlock image if it was locked.
+    Image_unlockIfLocked(env, image);
+
+    // Set timestamp
+    ALOGV("timestamp to be queued: %" PRId64, timestampNs);
+    res = native_window_set_buffers_timestamp(anw.get(), timestampNs);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set timestamp failed");
+        return;
+    }
+
+    // Set crop
+    android_native_rect_t cropRect;
+    cropRect.left = left;
+    cropRect.top = top;
+    cropRect.right = right;
+    cropRect.bottom = bottom;
+    res = native_window_set_crop(anw.get(), &cropRect);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set crop rect failed");
+        return;
+    }
+
+    // Finally, queue input buffer. queueBuffer takes ownership of fenceFd.
+    res = anw->queueBuffer(anw.get(), buffer, fenceFd);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Queue input buffer failed");
+        return;
+    }
+
+    // Clear the image native context: end of this image's lifecycle in public API.
+    Image_setNativeContext(env, image, NULL, -1);
+}
+
+// Attaches an opaque image (a BufferItem produced by an opaque ImageReader)
+// to this writer's queue and immediately queues it with the given timestamp
+// and crop. Only opaque-to-opaque transfers are supported (see b/19962027).
+// Returns 0 on success; -1 or a status_t error code on failure with a Java
+// exception pending.
+static jint ImageWriter_attachAndQueueImage(JNIEnv* env, jobject thiz, jlong nativeCtx,
+        jlong nativeBuffer, jint imageFormat, jlong timestampNs, jint left, jint top,
+        jint right, jint bottom) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return -1;
+    }
+
+    sp<Surface> surface = ctx->getProducer();
+    status_t res = OK;
+    if (!isFormatOpaque(imageFormat)) {
+        // TODO: need implement, see b/19962027
+        jniThrowRuntimeException(env,
+                "nativeAttachImage for non-opaque image is not implement yet!!!");
+        return -1;
+    }
+
+    if (!isFormatOpaque(ctx->getBufferFormat())) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Trying to attach an opaque image into a non-opaque ImageWriter");
+        return -1;
+    }
+
+    // Image is guaranteed to be from ImageReader at this point, so it is safe to
+    // cast to BufferItem pointer.
+    BufferItem* opaqueBuffer = reinterpret_cast<BufferItem*>(nativeBuffer);
+    if (opaqueBuffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized or already closed");
+        return -1;
+    }
+
+    // Step 1. Attach Image
+    res = surface->attachBuffer(opaqueBuffer->mGraphicBuffer.get());
+    if (res != OK) {
+        // TODO: handle different error case separately.
+        ALOGE("Attach image failed: %s (%d)", strerror(-res), res);
+        jniThrowRuntimeException(env, "nativeAttachImage failed!!!");
+        return res;
+    }
+    sp < ANativeWindow > anw = surface;
+
+    // Step 2. Set timestamp and crop. Note that we do not need unlock the image because
+    // it was not locked.
+    // NOTE(review): failures below leave the buffer attached (no detach on the
+    // error paths), and the function mixes -1 and status_t return conventions —
+    // confirm callers only test for zero/non-zero.
+    ALOGV("timestamp to be queued: %" PRId64, timestampNs);
+    res = native_window_set_buffers_timestamp(anw.get(), timestampNs);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set timestamp failed");
+        return res;
+    }
+
+    android_native_rect_t cropRect;
+    cropRect.left = left;
+    cropRect.top = top;
+    cropRect.right = right;
+    cropRect.bottom = bottom;
+    res = native_window_set_crop(anw.get(), &cropRect);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set crop rect failed");
+        return res;
+    }
+
+    // Step 3. Queue Image.
+    res = anw->queueBuffer(anw.get(), opaqueBuffer->mGraphicBuffer.get(), /*fenceFd*/
+            -1);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Queue input buffer failed");
+        return res;
+    }
+
+    // Do not set the image native context. Since it would overwrite the existing native context
+    // of the image that is from ImageReader, the subsequent image close will run into issues.
+
+    return res;
+}
+
+// --------------------------Image methods---------------------------------------
+
+// Reads the native state stored on a WriterSurfaceImage: the GraphicBuffer
+// pointer (from the long mNativeBuffer field) and the pending fence fd (from
+// the int mNativeFenceFd field). Either out-pointer may be NULL to skip it.
+static void Image_getNativeContext(JNIEnv* env, jobject thiz,
+        GraphicBuffer** buffer, int* fenceFd) {
+    ALOGV("%s", __FUNCTION__);
+    if (buffer != NULL) {
+        GraphicBuffer *gb = reinterpret_cast<GraphicBuffer *>
+                  (env->GetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer));
+        *buffer = gb;
+    }
+
+    if (fenceFd != NULL) {
+        // GetIntField already returns jint; no cast needed (the original
+        // reinterpret_cast was redundant and misleading).
+        *fenceFd = env->GetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd);
+    }
+}
+
+// Stores (buffer, fenceFd) on a WriterSurfaceImage, maintaining a manual
+// strong reference on the buffer: the new buffer is incStrong'ed before the
+// previously stored one is decStrong'ed, so passing the already-stored buffer
+// is safe. Pass (NULL, -1) to clear the context.
+static void Image_setNativeContext(JNIEnv* env, jobject thiz,
+        sp<GraphicBuffer> buffer, int fenceFd) {
+    ALOGV("%s:", __FUNCTION__);
+    GraphicBuffer* p = NULL;
+    Image_getNativeContext(env, thiz, &p, /*fenceFd*/NULL);
+    if (buffer != 0) {
+        buffer->incStrong((void*)Image_setNativeContext);
+    }
+    if (p) {
+        p->decStrong((void*)Image_setNativeContext);
+    }
+    env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer,
+            reinterpret_cast<jlong>(buffer.get()));
+
+    // static_cast: int -> jint is a plain integral conversion, not a
+    // reinterpretation.
+    env->SetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd,
+            static_cast<jint>(fenceFd));
+}
+
+// Unlocks the image's GraphicBuffer if the app had locked it via getPlanes().
+// "Locked" is inferred from a non-NULL mPlanes field; opaque formats can never
+// be locked so the field is not even read for them.
+static void Image_unlockIfLocked(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    Image_getNativeContext(env, thiz, &buffer, NULL);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    // Is locked?
+    bool isLocked = false;
+    jobject planes = NULL;
+    if (!isFormatOpaque(buffer->getPixelFormat())) {
+        planes = env->GetObjectField(thiz, gSurfaceImageClassInfo.mPlanes);
+    }
+    isLocked = (planes != NULL);
+    if (isLocked) {
+        // no need to use fence here, as we it will be consumed by either cancel or queue buffer.
+        status_t res = buffer->unlock();
+        if (res != OK) {
+            jniThrowRuntimeException(env, "unlock buffer failed");
+        }
+        ALOGV("Successfully unlocked the image");
+    }
+}
+
+// Returns the width of the image's GraphicBuffer, or -1 (with
+// IllegalStateException pending) if the image has no buffer attached.
+static jint Image_getWidth(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    Image_getNativeContext(env, thiz, &buffer, NULL);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return -1;
+    }
+
+    return buffer->getWidth();
+}
+
+// Returns the height of the image's GraphicBuffer, or -1 (with
+// IllegalStateException pending) if the image has no buffer attached.
+static jint Image_getHeight(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    Image_getNativeContext(env, thiz, &buffer, NULL);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return -1;
+    }
+
+    return buffer->getHeight();
+}
+
+// Some formats like JPEG defined with different values between android.graphics.ImageFormat and
+// graphics.h, need convert to the one defined in graphics.h here.
+// Currently only JPEG -> HAL_PIXEL_FORMAT_BLOB is remapped; everything else
+// passes through unchanged.
+static int Image_getPixelFormat(JNIEnv* env, int format) {
+    int jpegFormat;
+    jfieldID fid;
+
+    ALOGV("%s: format = 0x%x", __FUNCTION__, format);
+
+    // NOTE(review): FindClass/GetStaticFieldID run on every call; the JPEG
+    // constant could be cached in a static at class-init time.
+    jclass imageFormatClazz = env->FindClass("android/graphics/ImageFormat");
+    ALOG_ASSERT(imageFormatClazz != NULL);
+
+    fid = env->GetStaticFieldID(imageFormatClazz, "JPEG", "I");
+    jpegFormat = env->GetStaticIntField(imageFormatClazz, fid);
+
+    // Translate the JPEG to BLOB for camera purpose.
+    if (format == jpegFormat) {
+        format = HAL_PIXEL_FORMAT_BLOB;
+    }
+
+    return format;
+}
+
+// Returns the image's HAL pixel format (with the ImageFormat.JPEG -> BLOB
+// remap applied), or 0 with IllegalStateException pending if no buffer is
+// attached.
+static jint Image_getFormat(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    Image_getNativeContext(env, thiz, &buffer, NULL);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return 0;
+    }
+
+    return Image_getPixelFormat(env, buffer->getPixelFormat());
+}
+
+// Stores a fence fd into the image's mNativeFenceFd field (use -1 to mark the
+// fence as consumed).
+static void Image_setFenceFd(JNIEnv* env, jobject thiz, int fenceFd) {
+    ALOGV("%s:", __FUNCTION__);
+    // static_cast: int -> jint is an ordinary value conversion; the original
+    // reinterpret_cast was the wrong cast for this.
+    env->SetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd,
+            static_cast<jint>(fenceFd));
+}
+
+// Locks the image's GraphicBuffer for CPU write and fills *image with the
+// plane pointers/strides. YUV-capable formats are locked via lockAsyncYCbCr
+// (reporting flexFormat YCbCr_420_888); everything else falls back to a flat
+// lockAsync. The dequeue fence is consumed by the lock call.
+static void Image_getLockedImage(JNIEnv* env, jobject thiz, LockedImage *image) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    int fenceFd = -1;
+    Image_getNativeContext(env, thiz, &buffer, &fenceFd);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    void* pData = NULL;
+    android_ycbcr ycbcr = android_ycbcr();
+    status_t res;
+    int format = Image_getFormat(env, thiz);
+    int flexFormat = format;
+    if (isPossiblyYUV(format)) {
+        // ImageWriter doesn't use crop by itself, app sets it, use the no crop version.
+        res = buffer->lockAsyncYCbCr(GRALLOC_USAGE_SW_WRITE_OFTEN, &ycbcr, fenceFd);
+        // Clear the fenceFd as it is already consumed by lock call.
+        Image_setFenceFd(env, thiz, /*fenceFd*/-1);
+        if (res != OK) {
+            jniThrowRuntimeException(env, "lockAsyncYCbCr failed for YUV buffer");
+            return;
+        }
+        pData = ycbcr.y;
+        flexFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
+    }
+
+    // lockAsyncYCbCr for YUV is unsuccessful.
+    if (pData == NULL) {
+        // NOTE(review): this path also consumes fenceFd, but mNativeFenceFd is
+        // only cleared in the YUV branch above — confirm the stale fd in the
+        // Java field cannot be consumed twice.
+        res = buffer->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &pData, fenceFd);
+        if (res != OK) {
+            jniThrowRuntimeException(env, "lockAsync failed");
+            return;
+        }
+    }
+
+    image->data = reinterpret_cast<uint8_t*>(pData);
+    image->width = buffer->getWidth();
+    image->height = buffer->getHeight();
+    image->format = format;
+    image->flexFormat = flexFormat;
+    // ycbcr strides are only valid when the YCbCr lock succeeded.
+    image->stride = (ycbcr.y != NULL) ? static_cast<uint32_t>(ycbcr.ystride) : buffer->getStride();
+
+    image->dataCb = reinterpret_cast<uint8_t*>(ycbcr.cb);
+    image->dataCr = reinterpret_cast<uint8_t*>(ycbcr.cr);
+    image->chromaStride = static_cast<uint32_t>(ycbcr.cstride);
+    image->chromaStep = static_cast<uint32_t>(ycbcr.chroma_step);
+    ALOGV("Successfully locked the image");
+    // crop, transform, scalingMode, timestamp, and frameNumber should be set by producer,
+    // and we don't set them here.
+}
+
+// True when a JPEG writer is being fed through RGBA_8888 gralloc buffers — a
+// workaround for platforms with SW-write limitations on BLOB (b/17379185).
+static bool usingRGBAToJpegOverride(int32_t bufferFormat, int32_t writerCtxFormat) {
+    return writerCtxFormat == HAL_PIXEL_FORMAT_BLOB && bufferFormat == HAL_PIXEL_FORMAT_RGBA_8888;
+}
+
+// Maps the gralloc buffer format to the format the rest of the pipeline should
+// treat it as, applying the RGBA-as-JPEG override when active.
+static int32_t applyFormatOverrides(int32_t bufferFormat, int32_t writerCtxFormat)
+{
+    // Using HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers containing JPEGs to get around SW
+    // write limitations for some platforms (b/17379185).
+    if (usingRGBAToJpegOverride(bufferFormat, writerCtxFormat)) {
+        return HAL_PIXEL_FORMAT_BLOB;
+    }
+    return bufferFormat;
+}
+
+// Computes the actual JPEG payload size of a BLOB buffer by looking for the
+// camera3_jpeg_blob transport header placed at the end of the buffer; falls
+// back to the whole buffer size if no header is found.
+static uint32_t Image_getJpegSize(LockedImage* buffer, bool usingRGBAOverride) {
+    ALOGV("%s", __FUNCTION__);
+    ALOG_ASSERT(buffer != NULL, "Input buffer is NULL!!!");
+    uint32_t size = 0;
+    uint32_t width = buffer->width;
+    uint8_t* jpegBuffer = buffer->data;
+
+    if (usingRGBAOverride) {
+        // RGBA override: the buffer is width x height 4-byte pixels, so the
+        // byte size is derived from stride/height ("width" here means bytes).
+        width = (buffer->width + buffer->stride * (buffer->height - 1)) * 4;
+    }
+
+    // First check for JPEG transport header at the end of the buffer
+    // NOTE(review): assumes width >= sizeof(camera3_jpeg_blob) and reads the
+    // struct through a potentially unaligned pointer — confirm HAL guarantees.
+    uint8_t* header = jpegBuffer + (width - sizeof(struct camera3_jpeg_blob));
+    struct camera3_jpeg_blob *blob = (struct camera3_jpeg_blob*)(header);
+    if (blob->jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
+        size = blob->jpeg_size;
+        ALOGV("%s: Jpeg size = %d", __FUNCTION__, size);
+    }
+
+    // failed to find size, default to whole buffer
+    if (size == 0) {
+        /*
+         * This is a problem because not including the JPEG header
+         * means that in certain rare situations a regular JPEG blob
+         * will be misidentified as having a header, in which case
+         * we will get a garbage size value.
+         */
+        ALOGW("%s: No JPEG header detected, defaulting to size=width=%d",
+                __FUNCTION__, width);
+        size = width;
+    }
+
+    return size;
+}
+
+static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
+ int32_t writerFormat, uint8_t **base, uint32_t *size, int *pixelStride, int *rowStride) {
+ ALOGV("%s", __FUNCTION__);
+ ALOG_ASSERT(buffer != NULL, "Input buffer is NULL!!!");
+ ALOG_ASSERT(base != NULL, "base is NULL!!!");
+ ALOG_ASSERT(size != NULL, "size is NULL!!!");
+ ALOG_ASSERT(pixelStride != NULL, "pixelStride is NULL!!!");
+ ALOG_ASSERT(rowStride != NULL, "rowStride is NULL!!!");
+ ALOG_ASSERT((idx < IMAGE_WRITER_MAX_NUM_PLANES) && (idx >= 0));
+
+ ALOGV("%s: buffer: %p", __FUNCTION__, buffer);
+
+ uint32_t dataSize, ySize, cSize, cStride;
+ uint32_t pStride = 0, rStride = 0;
+ uint8_t *cb, *cr;
+ uint8_t *pData = NULL;
+ int bytesPerPixel = 0;
+
+ dataSize = ySize = cSize = cStride = 0;
+ int32_t fmt = buffer->flexFormat;
+
+ bool usingRGBAOverride = usingRGBAToJpegOverride(fmt, writerFormat);
+ fmt = applyFormatOverrides(fmt, writerFormat);
+ switch (fmt) {
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ pData =
+ (idx == 0) ?
+ buffer->data :
+ (idx == 1) ?
+ buffer->dataCb :
+ buffer->dataCr;
+ // only map until last pixel
+ if (idx == 0) {
+ pStride = 1;
+ rStride = buffer->stride;
+ dataSize = buffer->stride * (buffer->height - 1) + buffer->width;
+ } else {
+ pStride = buffer->chromaStep;
+ rStride = buffer->chromaStride;
+ dataSize = buffer->chromaStride * (buffer->height / 2 - 1) +
+ buffer->chromaStep * (buffer->width / 2 - 1) + 1;
+ }
+ break;
+ // NV21
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+ cr = buffer->data + (buffer->stride * buffer->height);
+ cb = cr + 1;
+ // only map until last pixel
+ ySize = buffer->width * (buffer->height - 1) + buffer->width;
+ cSize = buffer->width * (buffer->height / 2 - 1) + buffer->width - 1;
+
+ pData =
+ (idx == 0) ?
+ buffer->data :
+ (idx == 1) ?
+ cb:
+ cr;
+
+ dataSize = (idx == 0) ? ySize : cSize;
+ pStride = (idx == 0) ? 1 : 2;
+ rStride = buffer->width;
+ break;
+ case HAL_PIXEL_FORMAT_YV12:
+ // Y and C stride need to be 16 pixel aligned.
+ LOG_ALWAYS_FATAL_IF(buffer->stride % 16,
+ "Stride is not 16 pixel aligned %d", buffer->stride);
+
+ ySize = buffer->stride * buffer->height;
+ cStride = ALIGN(buffer->stride / 2, 16);
+ cr = buffer->data + ySize;
+ cSize = cStride * buffer->height / 2;
+ cb = cr + cSize;
+
+ pData =
+ (idx == 0) ?
+ buffer->data :
+ (idx == 1) ?
+ cb :
+ cr;
+ dataSize = (idx == 0) ? ySize : cSize;
+ pStride = 1;
+ rStride = (idx == 0) ? buffer->stride : ALIGN(buffer->stride / 2, 16);
+ break;
+ case HAL_PIXEL_FORMAT_Y8:
+ // Single plane, 8bpp.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ pStride = 1;
+ rStride = buffer->stride;
+ break;
+ case HAL_PIXEL_FORMAT_Y16:
+ bytesPerPixel = 2;
+ // Single plane, 16bpp, strides are specified in pixels, not in bytes
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 2;
+ break;
+ case HAL_PIXEL_FORMAT_BLOB:
+ // Used for JPEG data, height must be 1, width == size, single plane.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ ALOG_ASSERT(buffer->height == 1, "JPEG should has height value %d", buffer->height);
+
+ pData = buffer->data;
+ dataSize = Image_getJpegSize(buffer, usingRGBAOverride);
+ pStride = bytesPerPixel;
+            rStride = 0;
+ break;
+ case HAL_PIXEL_FORMAT_RAW16:
+ // Single plane 16bpp bayer data.
+ bytesPerPixel = 2;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 2;
+ break;
+ case HAL_PIXEL_FORMAT_RAW10:
+ // Single plane 10bpp bayer data.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ LOG_ALWAYS_FATAL_IF(buffer->width % 4,
+ "Width is not multiple of 4 %d", buffer->width);
+ LOG_ALWAYS_FATAL_IF(buffer->height % 2,
+ "Height is not even %d", buffer->height);
+ LOG_ALWAYS_FATAL_IF(buffer->stride < (buffer->width * 10 / 8),
+ "stride (%d) should be at least %d",
+ buffer->stride, buffer->width * 10 / 8);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ pStride = 0;
+ rStride = buffer->stride;
+ break;
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ // Single plane, 32bpp.
+ bytesPerPixel = 4;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 4;
+ break;
+ case HAL_PIXEL_FORMAT_RGB_565:
+ // Single plane, 16bpp.
+ bytesPerPixel = 2;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 2;
+ break;
+ case HAL_PIXEL_FORMAT_RGB_888:
+ // Single plane, 24bpp.
+ bytesPerPixel = 3;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 3;
+ break;
+ default:
+ jniThrowExceptionFmt(env, "java/lang/UnsupportedOperationException",
+ "Pixel format: 0x%x is unsupported", fmt);
+ break;
+ }
+
+ *base = pData;
+ *size = dataSize;
+ *pixelStride = pStride;
+ *rowStride = rStride;
+}
+
+// Builds the Java SurfacePlane[] for a writer-side image: validates that
+// opaque formats carry zero planes, locks the underlying buffer, then wraps
+// each plane's pixels in a direct ByteBuffer plus its row/pixel strides.
+// Returns NULL with a Java exception pending on failure.
+static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
+        int numPlanes, int writerFormat) {
+    ALOGV("%s: create SurfacePlane array with size %d", __FUNCTION__, numPlanes);
+    int rowStride, pixelStride;
+    uint8_t *pData;
+    uint32_t dataSize;
+    jobject byteBuffer;
+
+    int format = Image_getFormat(env, thiz);
+    // Opaque formats are not CPU-mappable, so a non-zero plane count is a
+    // caller error.
+    if (isFormatOpaque(format) && numPlanes > 0) {
+        String8 msg;
+        msg.appendFormat("Format 0x%x is opaque, thus not writable, the number of planes (%d)"
+                " must be 0", format, numPlanes);
+        jniThrowException(env, "java/lang/IllegalArgumentException", msg.string());
+        return NULL;
+    }
+
+    jobjectArray surfacePlanes = env->NewObjectArray(numPlanes, gSurfacePlaneClassInfo.clazz,
+            /*initial_element*/NULL);
+    if (surfacePlanes == NULL) {
+        jniThrowRuntimeException(env, "Failed to create SurfacePlane arrays,"
+                " probably out of memory");
+        return NULL;
+    }
+    // Opaque format with numPlanes == 0: return the (empty) array as-is.
+    if (isFormatOpaque(format)) {
+        return surfacePlanes;
+    }
+
+    // Buildup buffer info: rowStride, pixelStride and byteBuffers.
+    LockedImage lockedImg = LockedImage();
+    Image_getLockedImage(env, thiz, &lockedImg);
+
+    // Create all SurfacePlanes
+    writerFormat = Image_getPixelFormat(env, writerFormat);
+    for (int i = 0; i < numPlanes; i++) {
+        Image_getLockedImageInfo(env, &lockedImg, i, writerFormat,
+                &pData, &dataSize, &pixelStride, &rowStride);
+        byteBuffer = env->NewDirectByteBuffer(pData, dataSize);
+        // NewDirectByteBuffer may return NULL without raising an exception
+        // (e.g. when direct buffers are unsupported); throw our own then.
+        if ((byteBuffer == NULL) && (env->ExceptionCheck() == false)) {
+            jniThrowException(env, "java/lang/IllegalStateException",
+                    "Failed to allocate ByteBuffer");
+            return NULL;
+        }
+
+        // Finally, create this SurfacePlane.
+        jobject surfacePlane = env->NewObject(gSurfacePlaneClassInfo.clazz,
+                gSurfacePlaneClassInfo.ctor, thiz, rowStride, pixelStride, byteBuffer);
+        env->SetObjectArrayElement(surfacePlanes, i, surfacePlane);
+    }
+
+    return surfacePlanes;
+}
+
+// -------------------------------Private convenience methods--------------------
+
+// Returns true for formats whose pixel layout is private to the HAL; such
+// images cannot be CPU-mapped and therefore expose zero writable planes.
+static bool isFormatOpaque(int format) {
+    // Only treat IMPLEMENTATION_DEFINED as an opaque format for now.
+    return format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+}
+
+// Conservative classifier: formats known to be RGB/raw/blob/opaque return
+// false; every listed YUV variant AND any unknown format falls through to
+// the default and is treated as possibly-YUV.
+static bool isPossiblyYUV(PixelFormat format) {
+    switch (static_cast<int>(format)) {
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_RGBX_8888:
+        case HAL_PIXEL_FORMAT_RGB_888:
+        case HAL_PIXEL_FORMAT_RGB_565:
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+        case HAL_PIXEL_FORMAT_Y8:
+        case HAL_PIXEL_FORMAT_Y16:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_BLOB:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            return false;
+
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+        case HAL_PIXEL_FORMAT_YCbCr_422_I:
+        default:
+            return true;
+    }
+}
+
+} // extern "C"
+
+// ----------------------------------------------------------------------------
+
+// JNI method table for android.media.ImageWriter; names and signatures must
+// stay in sync with the Java-side native method declarations.
+static JNINativeMethod gImageWriterMethods[] = {
+    {"nativeClassInit",         "()V",                        (void*)ImageWriter_classInit },
+    {"nativeInit",              "(Ljava/lang/Object;Landroid/view/Surface;I)J",
+                                                              (void*)ImageWriter_init },
+    {"nativeClose",              "(J)V",                      (void*)ImageWriter_close },
+    {"nativeAttachAndQueueImage", "(JJIJIIII)I",          (void*)ImageWriter_attachAndQueueImage },
+    {"nativeDequeueInputImage", "(JLandroid/media/Image;)V", (void*)ImageWriter_dequeueImage },
+    {"nativeQueueInputImage",   "(JLandroid/media/Image;JIIII)V", (void*)ImageWriter_queueImage },
+    {"cancelImage",             "(JLandroid/media/Image;)V",  (void*)ImageWriter_cancelImage },
+};
+
+// JNI method table for the nested ImageWriter$WriterSurfaceImage class.
+static JNINativeMethod gImageMethods[] = {
+    {"nativeCreatePlanes",      "(II)[Landroid/media/ImageWriter$WriterSurfaceImage$SurfacePlane;",
+                                                              (void*)Image_createSurfacePlanes },
+    {"nativeGetWidth",         "()I",                         (void*)Image_getWidth },
+    {"nativeGetHeight",        "()I",                         (void*)Image_getHeight },
+    {"nativeGetFormat",        "()I",                         (void*)Image_getFormat },
+};
+
+// Registers both method tables with the VM; a non-zero (failure) result from
+// either registration is propagated to the caller.
+int register_android_media_ImageWriter(JNIEnv *env) {
+
+    int ret1 = AndroidRuntime::registerNativeMethods(env,
+                   "android/media/ImageWriter", gImageWriterMethods, NELEM(gImageWriterMethods));
+
+    int ret2 = AndroidRuntime::registerNativeMethods(env,
+                   "android/media/ImageWriter$WriterSurfaceImage", gImageMethods, NELEM(gImageMethods));
+
+    return (ret1 || ret2);
+}
+
diff --git a/media/jni/android_media_MediaCodec.cpp b/media/jni/android_media_MediaCodec.cpp
index 1cf589d..93b8ec7 100644
--- a/media/jni/android_media_MediaCodec.cpp
+++ b/media/jni/android_media_MediaCodec.cpp
@@ -39,7 +39,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/MediaErrors.h>
-
+#include <media/stagefright/PersistentSurface.h>
#include <nativehelper/ScopedLocalRef.h>
#include <system/window.h>
@@ -70,6 +70,19 @@ static struct CodecActionCodes {
jint codecActionRecoverable;
} gCodecActionCodes;
+static struct CodecErrorCodes {
+ jint errorInsufficientResource;
+ jint errorReclaimed;
+} gCodecErrorCodes;
+
+static struct {
+ jclass clazz;
+ jfieldID mLock;
+ jfieldID mPersistentObject;
+ jmethodID ctor;
+ jmethodID setNativeObjectLocked;
+} gPersistentSurfaceClassInfo;
+
struct fields_t {
jfieldID context;
jmethodID postEventFromNativeID;
@@ -82,6 +95,7 @@ struct fields_t {
};
static fields_t gFields;
+static const void *sRefBaseOwner;
////////////////////////////////////////////////////////////////////////////////
@@ -215,7 +229,7 @@ void JMediaCodec::deleteJavaObjects(JNIEnv *env) {
status_t JMediaCodec::setCallback(jobject cb) {
if (cb != NULL) {
if (mCallbackNotification == NULL) {
- mCallbackNotification = new AMessage(kWhatCallbackNotify, id());
+ mCallbackNotification = new AMessage(kWhatCallbackNotify, this);
}
} else {
mCallbackNotification.clear();
@@ -240,11 +254,29 @@ status_t JMediaCodec::configure(
return mCodec->configure(format, mSurfaceTextureClient, crypto, flags);
}
+// Switches the codec's output surface. The producer (may be NULL to detach)
+// is wrapped in an app-controlled Surface; mSurfaceTextureClient is only
+// replaced after the codec accepts it, so failure keeps the old surface.
+status_t JMediaCodec::setSurface(
+        const sp<IGraphicBufferProducer> &bufferProducer) {
+    sp<Surface> client;
+    if (bufferProducer != NULL) {
+        client = new Surface(bufferProducer, true /* controlledByApp */);
+    }
+    status_t err = mCodec->setSurface(client);
+    if (err == OK) {
+        mSurfaceTextureClient = client;
+    }
+    return err;
+}
+
status_t JMediaCodec::createInputSurface(
sp<IGraphicBufferProducer>* bufferProducer) {
return mCodec->createInputSurface(bufferProducer);
}
+// Hands a persistent (codec-independent) input surface to the codec.
+status_t JMediaCodec::setInputSurface(
+        const sp<PersistentSurface> &surface) {
+    return mCodec->setInputSurface(surface);
+}
+
status_t JMediaCodec::start() {
return mCodec->start();
}
@@ -587,6 +619,18 @@ static jthrowable createCodecException(
break;
}
+ /* translate OS errors to Java API CodecException errorCodes */
+ switch (err) {
+ case NO_MEMORY:
+ err = gCodecErrorCodes.errorInsufficientResource;
+ break;
+ case DEAD_OBJECT:
+ err = gCodecErrorCodes.errorReclaimed;
+ break;
+ default: /* Other error codes go out as is. */
+ break;
+ }
+
return (jthrowable)env->NewObject(clazz.get(), ctor, err, actionCode, msgObj.get());
}
@@ -790,6 +834,10 @@ static jint throwExceptionAsNecessary(
jniThrowException(env, "java/lang/IllegalStateException", msg);
return 0;
+ case BAD_VALUE:
+ jniThrowException(env, "java/lang/IllegalArgumentException", msg);
+ return 0;
+
default:
if (isCryptoError(err)) {
throwCryptoException(env, err, msg);
@@ -862,6 +910,149 @@ static void android_media_MediaCodec_native_configure(
throwExceptionAsNecessary(env, err);
}
+// JNI entry for MediaCodec.setOutputSurface(). Extracts the producer from
+// the Java Surface (NULL jsurface detaches the output surface) and forwards
+// to JMediaCodec::setSurface; errors become Java exceptions.
+static void android_media_MediaCodec_native_setSurface(
+        JNIEnv *env,
+        jobject thiz,
+        jobject jsurface) {
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sp<IGraphicBufferProducer> bufferProducer;
+    if (jsurface != NULL) {
+        sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+        if (surface != NULL) {
+            bufferProducer = surface->getIGraphicBufferProducer();
+        } else {
+            // Java Surface exists but its native side has been released.
+            jniThrowException(
+                    env,
+                    "java/lang/IllegalArgumentException",
+                    "The surface has been released");
+            return;
+        }
+    }
+
+    status_t err = codec->setSurface(bufferProducer);
+    throwExceptionAsNecessary(env, err);
+}
+
+// Reads the native PersistentSurface pointer stored on the Java object,
+// synchronized on the object's mLock monitor (matching the Java-side
+// locking). Returns a NULL sp<> if the monitor cannot be entered.
+sp<PersistentSurface> android_media_MediaCodec_getPersistentInputSurface(
+        JNIEnv* env, jobject object) {
+    sp<PersistentSurface> persistentSurface;
+
+    jobject lock = env->GetObjectField(
+            object, gPersistentSurfaceClassInfo.mLock);
+    if (env->MonitorEnter(lock) == JNI_OK) {
+        persistentSurface = reinterpret_cast<PersistentSurface *>(
+            env->GetLongField(object,
+                    gPersistentSurfaceClassInfo.mPersistentObject));
+        env->MonitorExit(lock);
+    }
+    env->DeleteLocalRef(lock);
+
+    return persistentSurface;
+}
+
+// Creates a native PersistentSurface plus a wrapping Surface and stores both
+// pointers on a new Java PersistentSurface object (under its mLock monitor).
+// On success an extra strong ref is taken on each native object so they stay
+// alive while the Java object holds raw pointers; the refs are dropped in
+// releasePersistentInputSurface / the Surface JNI. Returns NULL on failure.
+static jobject android_media_MediaCodec_createPersistentInputSurface(
+        JNIEnv* env, jclass /* clazz */) {
+    ALOGV("android_media_MediaCodec_createPersistentInputSurface");
+    sp<PersistentSurface> persistentSurface =
+        MediaCodec::CreatePersistentInputSurface();
+
+    if (persistentSurface == NULL) {
+        return NULL;
+    }
+
+    sp<Surface> surface = new Surface(
+            persistentSurface->getBufferProducer(), true);
+    if (surface == NULL) {
+        return NULL;
+    }
+
+    jobject object = env->NewObject(
+            gPersistentSurfaceClassInfo.clazz,
+            gPersistentSurfaceClassInfo.ctor);
+
+    if (object == NULL) {
+        if (env->ExceptionCheck()) {
+            ALOGE("Could not create PersistentSurface.");
+            env->ExceptionClear();
+        }
+        return NULL;
+    }
+
+    jobject lock = env->GetObjectField(
+            object, gPersistentSurfaceClassInfo.mLock);
+    if (env->MonitorEnter(lock) == JNI_OK) {
+        env->CallVoidMethod(
+                object,
+                gPersistentSurfaceClassInfo.setNativeObjectLocked,
+                (jlong)surface.get());
+        env->SetLongField(
+                object,
+                gPersistentSurfaceClassInfo.mPersistentObject,
+                (jlong)persistentSurface.get());
+        env->MonitorExit(lock);
+    } else {
+        env->DeleteLocalRef(object);
+        object = NULL;
+    }
+    env->DeleteLocalRef(lock);
+
+    if (object != NULL) {
+        surface->incStrong(&sRefBaseOwner);
+        persistentSurface->incStrong(&sRefBaseOwner);
+    }
+
+    return object;
+}
+
+// Clears the Java object's native PersistentSurface pointer (under its mLock
+// monitor) and drops the strong ref taken in createPersistentInputSurface.
+static void android_media_MediaCodec_releasePersistentInputSurface(
+        JNIEnv* env, jclass /* clazz */, jobject object) {
+    sp<PersistentSurface> persistentSurface;
+
+    jobject lock = env->GetObjectField(
+            object, gPersistentSurfaceClassInfo.mLock);
+    if (env->MonitorEnter(lock) == JNI_OK) {
+        persistentSurface = reinterpret_cast<PersistentSurface *>(
+                env->GetLongField(
+                        object, gPersistentSurfaceClassInfo.mPersistentObject));
+        env->SetLongField(
+                object,
+                gPersistentSurfaceClassInfo.mPersistentObject,
+                (jlong)0);
+        env->MonitorExit(lock);
+    }
+    env->DeleteLocalRef(lock);
+
+    if (persistentSurface != NULL) {
+        persistentSurface->decStrong(&sRefBaseOwner);
+    }
+    // no need to release surface as it will be released by Surface's jni
+}
+
+// JNI entry for MediaCodec.setInputSurface(): resolves the native
+// PersistentSurface from the Java object and hands it to the codec.
+static void android_media_MediaCodec_setInputSurface(
+        JNIEnv* env, jobject thiz, jobject object) {
+    ALOGV("android_media_MediaCodec_setInputSurface");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+    if (codec == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sp<PersistentSurface> persistentSurface =
+        android_media_MediaCodec_getPersistentInputSurface(env, object);
+
+    status_t err = codec->setInputSurface(persistentSurface);
+    if (err != NO_ERROR) {
+        throwExceptionAsNecessary(env, err);
+    }
+}
+
static jobject android_media_MediaCodec_createInputSurface(JNIEnv* env,
jobject thiz) {
ALOGV("android_media_MediaCodec_createInputSurface");
@@ -1454,6 +1645,39 @@ static void android_media_MediaCodec_native_init(JNIEnv *env) {
CHECK(field != NULL);
gCodecActionCodes.codecActionRecoverable =
env->GetStaticIntField(clazz.get(), field);
+
+ field = env->GetStaticFieldID(clazz.get(), "ERROR_INSUFFICIENT_RESOURCE", "I");
+ CHECK(field != NULL);
+ gCodecErrorCodes.errorInsufficientResource =
+ env->GetStaticIntField(clazz.get(), field);
+
+ field = env->GetStaticFieldID(clazz.get(), "ERROR_RECLAIMED", "I");
+ CHECK(field != NULL);
+ gCodecErrorCodes.errorReclaimed =
+ env->GetStaticIntField(clazz.get(), field);
+
+ clazz.reset(env->FindClass("android/view/Surface"));
+ CHECK(clazz.get() != NULL);
+
+ field = env->GetFieldID(clazz.get(), "mLock", "Ljava/lang/Object;");
+ CHECK(field != NULL);
+ gPersistentSurfaceClassInfo.mLock = field;
+
+ jmethodID method = env->GetMethodID(clazz.get(), "setNativeObjectLocked", "(J)V");
+ CHECK(method != NULL);
+ gPersistentSurfaceClassInfo.setNativeObjectLocked = method;
+
+ clazz.reset(env->FindClass("android/media/MediaCodec$PersistentSurface"));
+ CHECK(clazz.get() != NULL);
+ gPersistentSurfaceClassInfo.clazz = (jclass)env->NewGlobalRef(clazz.get());
+
+ method = env->GetMethodID(clazz.get(), "<init>", "()V");
+ CHECK(method != NULL);
+ gPersistentSurfaceClassInfo.ctor = method;
+
+ field = env->GetFieldID(clazz.get(), "mPersistentObject", "J");
+ CHECK(field != NULL);
+ gPersistentSurfaceClassInfo.mPersistentObject = field;
}
static void android_media_MediaCodec_native_setup(
@@ -1479,6 +1703,11 @@ static void android_media_MediaCodec_native_setup(
String8::format("Failed to initialize %s, error %#x", tmp, err));
env->ReleaseStringUTFChars(name, tmp);
return;
+    } else if (err == NO_MEMORY) {
+ throwCodecException(env, err, ACTION_CODE_TRANSIENT,
+ String8::format("Failed to initialize %s, error %#x", tmp, err));
+ env->ReleaseStringUTFChars(name, tmp);
+ return;
} else if (err != OK) {
// believed possible to try again
jniThrowException(env, "java/io/IOException",
@@ -1504,6 +1733,17 @@ static JNINativeMethod gMethods[] = {
{ "native_reset", "()V", (void *)android_media_MediaCodec_reset },
+ { "native_releasePersistentInputSurface",
+ "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaCodec_releasePersistentInputSurface},
+
+ { "native_createPersistentInputSurface",
+ "()Landroid/media/MediaCodec$PersistentSurface;",
+ (void *)android_media_MediaCodec_createPersistentInputSurface },
+
+ { "native_setInputSurface", "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaCodec_setInputSurface },
+
{ "native_setCallback",
"(Landroid/media/MediaCodec$Callback;)V",
(void *)android_media_MediaCodec_native_setCallback },
@@ -1513,6 +1753,10 @@ static JNINativeMethod gMethods[] = {
"Landroid/media/MediaCrypto;I)V",
(void *)android_media_MediaCodec_native_configure },
+ { "native_setSurface",
+ "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaCodec_native_setSurface },
+
{ "createInputSurface", "()Landroid/view/Surface;",
(void *)android_media_MediaCodec_createInputSurface },
diff --git a/media/jni/android_media_MediaCodec.h b/media/jni/android_media_MediaCodec.h
index 9f2785a..a4ed67b 100644
--- a/media/jni/android_media_MediaCodec.h
+++ b/media/jni/android_media_MediaCodec.h
@@ -33,6 +33,7 @@ struct AString;
struct ICrypto;
struct IGraphicBufferProducer;
struct MediaCodec;
+struct PersistentSurface;
class Surface;
struct JMediaCodec : public AHandler {
@@ -53,7 +54,11 @@ struct JMediaCodec : public AHandler {
const sp<ICrypto> &crypto,
int flags);
+ status_t setSurface(
+ const sp<IGraphicBufferProducer> &surface);
+
status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
+ status_t setInputSurface(const sp<PersistentSurface> &surface);
status_t start();
status_t stop();
diff --git a/media/jni/android_media_MediaCodecList.cpp b/media/jni/android_media_MediaCodecList.cpp
index f8c349b..82dd48d 100644
--- a/media/jni/android_media_MediaCodecList.cpp
+++ b/media/jni/android_media_MediaCodecList.cpp
@@ -262,6 +262,27 @@ static jobject android_media_MediaCodecList_getCodecCapabilities(
return caps;
}
+// Fetches the codec list's global settings message and converts it to a
+// Java Map. Returns NULL on any failure (a non-zero ConvertMessageToMap
+// result is treated as conversion failure — presumably it leaves a Java
+// exception pending; TODO confirm against its implementation).
+static jobject android_media_MediaCodecList_getGlobalSettings(JNIEnv *env, jobject /* thiz */) {
+    sp<IMediaCodecList> mcl = getCodecList(env);
+    if (mcl == NULL) {
+        // Runtime exception already pending.
+        return NULL;
+    }
+
+    const sp<AMessage> settings = mcl->getGlobalSettings();
+    if (settings == NULL) {
+        jniThrowException(env, "java/lang/RuntimeException", "cannot get global settings");
+        return NULL;
+    }
+
+    jobject settingsObj = NULL;
+    if (ConvertMessageToMap(env, settings, &settingsObj)) {
+        return NULL;
+    }
+
+    return settingsObj;
+}
+
static void android_media_MediaCodecList_native_init(JNIEnv* /* env */) {
}
@@ -277,6 +298,10 @@ static JNINativeMethod gMethods[] = {
"(ILjava/lang/String;)Landroid/media/MediaCodecInfo$CodecCapabilities;",
(void *)android_media_MediaCodecList_getCodecCapabilities },
+ { "native_getGlobalSettings",
+ "()Ljava/util/Map;",
+ (void *)android_media_MediaCodecList_getGlobalSettings },
+
{ "findCodecByName", "(Ljava/lang/String;)I",
(void *)android_media_MediaCodecList_findCodecByName },
diff --git a/media/jni/android_media_MediaCrypto.cpp b/media/jni/android_media_MediaCrypto.cpp
index d2216fb..a9accb0 100644
--- a/media/jni/android_media_MediaCrypto.cpp
+++ b/media/jni/android_media_MediaCrypto.cpp
@@ -140,6 +140,15 @@ sp<ICrypto> JCrypto::GetCrypto(JNIEnv *env, jobject obj) {
return jcrypto->mCrypto;
}
+// JNI conversion utilities
+static Vector<uint8_t> JByteArrayToVector(JNIEnv *env, jbyteArray const &byteArray) {
+ Vector<uint8_t> vector;
+ size_t length = env->GetArrayLength(byteArray);
+ vector.insertAt((size_t)0, length);
+ env->GetByteArrayRegion(byteArray, 0, length, (jbyte *)vector.editArray());
+ return vector;
+}
+
} // namespace android
using namespace android;
@@ -274,6 +283,37 @@ static jboolean android_media_MediaCrypto_requiresSecureDecoderComponent(
return result ? JNI_TRUE : JNI_FALSE;
}
+// JNI entry for MediaCrypto.setMediaDrmSession(): associates a MediaDrm
+// session with this crypto object. Throws IllegalArgumentException for a
+// null session id or missing native crypto, and MediaCryptoException with a
+// descriptive message when the plugin call fails.
+static void android_media_MediaCrypto_setMediaDrmSession(
+        JNIEnv *env, jobject thiz, jbyteArray jsessionId) {
+    if (jsessionId == NULL) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+
+    sp<ICrypto> crypto = JCrypto::GetCrypto(env, thiz);
+
+    if (crypto == NULL) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+
+    Vector<uint8_t> sessionId(JByteArrayToVector(env, jsessionId));
+
+    status_t err = crypto->setMediaDrmSession(sessionId);
+
+    // BUG FIX: only throw on failure. The previous code called
+    // jniThrowException unconditionally, raising "setMediaDrmSession failed"
+    // even when the call returned OK.
+    if (err != OK) {
+        String8 msg("setMediaDrmSession failed");
+        if (err == ERROR_DRM_SESSION_NOT_OPENED) {
+            msg += ": session not opened";
+        } else if (err == ERROR_UNSUPPORTED) {
+            msg += ": not supported by this crypto scheme";
+        } else if (err == NO_INIT) {
+            msg += ": crypto plugin not initialized";
+        } else {
+            msg.appendFormat(": general failure (%d)", err);
+        }
+        jniThrowException(env, "android/media/MediaCryptoException", msg.string());
+    }
+}
+
static JNINativeMethod gMethods[] = {
{ "release", "()V", (void *)android_media_MediaCrypto_release },
{ "native_init", "()V", (void *)android_media_MediaCrypto_native_init },
@@ -289,6 +329,9 @@ static JNINativeMethod gMethods[] = {
{ "requiresSecureDecoderComponent", "(Ljava/lang/String;)Z",
(void *)android_media_MediaCrypto_requiresSecureDecoderComponent },
+
+ { "setMediaDrmSession", "([B)V",
+ (void *)android_media_MediaCrypto_setMediaDrmSession },
};
int register_android_media_Crypto(JNIEnv *env) {
diff --git a/media/jni/android_media_MediaDataSource.cpp b/media/jni/android_media_MediaDataSource.cpp
new file mode 100644
index 0000000..025133f
--- /dev/null
+++ b/media/jni/android_media_MediaDataSource.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "JMediaDataSource-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaDataSource.h"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/Log.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/ScopedLocalRef.h>
+
+namespace android {
+
+// Caches global refs to the Java MediaDataSource and a kBufferSize scratch
+// byte[], resolves the readAt/getSize/close method ids, and allocates the
+// shared IMemory region used to hand bytes across binder. The MemoryDealer
+// is a local; the allocation in mMemory is what keeps the region alive —
+// NOTE(review): assumes the allocation pins its heap; confirm MemoryDealer
+// semantics.
+JMediaDataSource::JMediaDataSource(JNIEnv* env, jobject source)
+    : mJavaObjStatus(OK), mSizeIsCached(false), mCachedSize(0), mMemory(NULL) {
+    mMediaDataSourceObj = env->NewGlobalRef(source);
+    CHECK(mMediaDataSourceObj != NULL);
+
+    ScopedLocalRef<jclass> mediaDataSourceClass(env, env->GetObjectClass(mMediaDataSourceObj));
+    CHECK(mediaDataSourceClass.get() != NULL);
+
+    mReadMethod = env->GetMethodID(mediaDataSourceClass.get(), "readAt", "(J[BII)I");
+    CHECK(mReadMethod != NULL);
+    mGetSizeMethod = env->GetMethodID(mediaDataSourceClass.get(), "getSize", "()J");
+    CHECK(mGetSizeMethod != NULL);
+    mCloseMethod = env->GetMethodID(mediaDataSourceClass.get(), "close", "()V");
+    CHECK(mCloseMethod != NULL);
+
+    ScopedLocalRef<jbyteArray> tmp(env, env->NewByteArray(kBufferSize));
+    mByteArrayObj = (jbyteArray)env->NewGlobalRef(tmp.get());
+    CHECK(mByteArrayObj != NULL);
+
+    sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "JMediaDataSource");
+    mMemory = memoryDealer->allocate(kBufferSize);
+    if (mMemory == NULL) {
+        ALOGE("Failed to allocate memory!");
+    }
+}
+
+// Releases the global refs taken in the constructor.
+JMediaDataSource::~JMediaDataSource() {
+    JNIEnv* env = AndroidRuntime::getJNIEnv();
+    env->DeleteGlobalRef(mMediaDataSourceObj);
+    env->DeleteGlobalRef(mByteArrayObj);
+}
+
+// Exposes the shared memory region that readAt() fills (NULL if the
+// constructor's allocation failed).
+sp<IMemory> JMediaDataSource::getIMemory() {
+    Mutex::Autolock lock(mLock);
+    return mMemory;
+}
+
+// Reads up to kBufferSize bytes at |offset| from the Java source into the
+// shared IMemory region. Returns the byte count, 0 on EOF (Java returns -1),
+// or -1 on error; any Java exception or protocol violation permanently
+// marks the source broken (mJavaObjStatus).
+ssize_t JMediaDataSource::readAt(off64_t offset, size_t size) {
+    Mutex::Autolock lock(mLock);
+
+    if (mJavaObjStatus != OK || mMemory == NULL) {
+        return -1;
+    }
+    // Clamp to the scratch array / shared memory capacity.
+    if (size > kBufferSize) {
+        size = kBufferSize;
+    }
+
+    JNIEnv* env = AndroidRuntime::getJNIEnv();
+    jint numread = env->CallIntMethod(mMediaDataSourceObj, mReadMethod,
+            (jlong)offset, mByteArrayObj, (jint)0, (jint)size);
+    if (env->ExceptionCheck()) {
+        ALOGW("An exception occurred in readAt()");
+        LOGW_EX(env);
+        env->ExceptionClear();
+        mJavaObjStatus = UNKNOWN_ERROR;
+        return -1;
+    }
+    if (numread < 0) {
+        if (numread != -1) {
+            ALOGW("An error occurred in readAt()");
+            mJavaObjStatus = UNKNOWN_ERROR;
+            return -1;
+        } else {
+            // numread == -1 indicates EOF
+            return 0;
+        }
+    }
+    // The app must not report more bytes than were requested.
+    if ((size_t)numread > size) {
+        ALOGE("readAt read too many bytes.");
+        mJavaObjStatus = UNKNOWN_ERROR;
+        return -1;
+    }
+
+    ALOGV("readAt %lld / %zu => %d.", (long long)offset, size, numread);
+    env->GetByteArrayRegion(mByteArrayObj, 0, numread, (jbyte*)mMemory->pointer());
+    return numread;
+}
+
+// Returns the source's total size in bytes via |size|, querying Java at most
+// once (the result is cached so the app can't change the size afterwards).
+// A negative Java result is normalized to -1, meaning "unknown size".
+// Returns OK on success, UNKNOWN_ERROR if the source is broken or throws.
+status_t JMediaDataSource::getSize(off64_t* size) {
+    Mutex::Autolock lock(mLock);
+
+    if (mJavaObjStatus != OK) {
+        return UNKNOWN_ERROR;
+    }
+    if (mSizeIsCached) {
+        // BUG FIX: this branch used to do "return mCachedSize;", returning
+        // the size itself as a status_t and leaving *size unwritten.
+        *size = mCachedSize;
+        return OK;
+    }
+
+    JNIEnv* env = AndroidRuntime::getJNIEnv();
+    *size = env->CallLongMethod(mMediaDataSourceObj, mGetSizeMethod);
+    if (env->ExceptionCheck()) {
+        ALOGW("An exception occurred in getSize()");
+        LOGW_EX(env);
+        env->ExceptionClear();
+        // After returning an error, size shouldn't be used by callers.
+        *size = UNKNOWN_ERROR;
+        mJavaObjStatus = UNKNOWN_ERROR;
+        return UNKNOWN_ERROR;
+    }
+
+    // The minimum size should be -1, which indicates unknown size.
+    if (*size < 0) {
+        *size = -1;
+    }
+
+    mCachedSize = *size;
+    mSizeIsCached = true;
+    return OK;
+}
+
+// Forwards close() to the Java source, then marks this object broken so all
+// subsequent readAt()/getSize() calls fail fast.
+void JMediaDataSource::close() {
+    Mutex::Autolock lock(mLock);
+
+    JNIEnv* env = AndroidRuntime::getJNIEnv();
+    env->CallVoidMethod(mMediaDataSourceObj, mCloseMethod);
+    // The closed state is effectively the same as an error state.
+    mJavaObjStatus = UNKNOWN_ERROR;
+}
+
+} // namespace android
diff --git a/media/jni/android_media_MediaDataSource.h b/media/jni/android_media_MediaDataSource.h
new file mode 100644
index 0000000..2bc237e
--- /dev/null
+++ b/media/jni/android_media_MediaDataSource.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIADATASOURCE_H_
+#define _ANDROID_MEDIA_MEDIADATASOURCE_H_
+
+#include "jni.h"
+
+#include <media/IDataSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+// The native counterpart to a Java android.media.MediaDataSource. It inherits from
+// IDataSource so that it can be accessed remotely.
+//
+// If the java DataSource returns an error or throws an exception it
+// will be considered to be in a broken state, and the only further call this
+// will make is to close().
+class JMediaDataSource : public BnDataSource {
+public:
+    enum {
+        // Size of the scratch byte[] and shared memory region; readAt()
+        // requests are clamped to this many bytes.
+        kBufferSize = 64 * 1024,
+    };
+
+    JMediaDataSource(JNIEnv *env, jobject source);
+    virtual ~JMediaDataSource();
+
+    virtual sp<IMemory> getIMemory();
+    virtual ssize_t readAt(off64_t offset, size_t size);
+    virtual status_t getSize(off64_t* size);
+    virtual void close();
+
+private:
+    // Protect all member variables with mLock because this object will be
+    // accessed on different binder worker threads.
+    Mutex mLock;
+
+    // The status of the java DataSource. Set to OK unless an error occurred or
+    // close() was called.
+    status_t mJavaObjStatus;
+    // Only call the java getSize() once so the app can't change the size on us.
+    bool mSizeIsCached;
+    off64_t mCachedSize;
+    // Shared memory region that readAt() results are copied into.
+    sp<IMemory> mMemory;
+
+    // Global refs / method ids resolved once in the constructor.
+    jobject mMediaDataSourceObj;
+    jmethodID mReadMethod;
+    jmethodID mGetSizeMethod;
+    jmethodID mCloseMethod;
+    jbyteArray mByteArrayObj;
+
+    DISALLOW_EVIL_CONSTRUCTORS(JMediaDataSource);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIADATASOURCE_H_
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index 5578416..d456dc1 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -59,6 +59,7 @@ namespace android {
struct RequestFields {
jfieldID data;
jfieldID defaultUrl;
+ jfieldID requestType;
};
struct ArrayListFields {
@@ -92,14 +93,27 @@ struct EventTypes {
jint kEventKeyRequired;
jint kEventKeyExpired;
jint kEventVendorDefined;
+ jint kEventSessionReclaimed;
} gEventTypes;
+struct EventWhat {
+ jint kWhatDrmEvent;
+ jint kWhatExpirationUpdate;
+ jint kWhatKeyStatusChange;
+} gEventWhat;
+
struct KeyTypes {
jint kKeyTypeStreaming;
jint kKeyTypeOffline;
jint kKeyTypeRelease;
} gKeyTypes;
+struct KeyRequestTypes {
+ jint kKeyRequestTypeInitial;
+ jint kKeyRequestTypeRenewal;
+ jint kKeyRequestTypeRelease;
+} gKeyRequestTypes;
+
struct CertificateTypes {
jint kCertificateTypeNone;
jint kCertificateTypeX509;
@@ -178,22 +192,37 @@ JNIDrmListener::~JNIDrmListener()
void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
const Parcel *obj)
{
- jint jeventType;
+ jint jwhat;
+ jint jeventType = 0;
// translate DrmPlugin event types into their java equivalents
- switch(eventType) {
+ switch (eventType) {
case DrmPlugin::kDrmPluginEventProvisionRequired:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventProvisionRequired;
break;
case DrmPlugin::kDrmPluginEventKeyNeeded:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventKeyRequired;
break;
case DrmPlugin::kDrmPluginEventKeyExpired:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventKeyExpired;
break;
case DrmPlugin::kDrmPluginEventVendorDefined:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventVendorDefined;
break;
+ case DrmPlugin::kDrmPluginEventSessionReclaimed:
+ jwhat = gEventWhat.kWhatDrmEvent;
+ jeventType = gEventTypes.kEventSessionReclaimed;
+ break;
+ case DrmPlugin::kDrmPluginEventExpirationUpdate:
+ jwhat = gEventWhat.kWhatExpirationUpdate;
+ break;
+ case DrmPlugin::kDrmPluginEventKeysChange:
+ jwhat = gEventWhat.kWhatKeyStatusChange;
+ break;
default:
ALOGE("Invalid event DrmPlugin::EventType %d, ignored", (int)eventType);
return;
@@ -206,7 +235,7 @@ void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
Parcel* nativeParcel = parcelForJavaObject(env, jParcel);
nativeParcel->setData(obj->data(), obj->dataSize());
env->CallStaticVoidMethod(mClass, gFields.post_event, mObject,
- jeventType, extra, jParcel);
+ jwhat, jeventType, extra, jParcel);
env->DeleteLocalRef(jParcel);
}
}
@@ -232,7 +261,7 @@ static bool throwExceptionAsNecessary(
const char *drmMessage = NULL;
- switch(err) {
+ switch (err) {
case ERROR_DRM_UNKNOWN:
drmMessage = "General DRM error";
break;
@@ -562,7 +591,7 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm");
GET_FIELD_ID(gFields.context, clazz, "mNativeContext", "J");
GET_STATIC_METHOD_ID(gFields.post_event, clazz, "postEventFromNative",
- "(Ljava/lang/Object;IILjava/lang/Object;)V");
+ "(Ljava/lang/Object;IIILjava/lang/Object;)V");
jfieldID field;
GET_STATIC_FIELD_ID(field, clazz, "EVENT_PROVISION_REQUIRED", "I");
@@ -573,6 +602,15 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
gEventTypes.kEventKeyExpired = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "EVENT_VENDOR_DEFINED", "I");
gEventTypes.kEventVendorDefined = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "EVENT_SESSION_RECLAIMED", "I");
+ gEventTypes.kEventSessionReclaimed = env->GetStaticIntField(clazz, field);
+
+ GET_STATIC_FIELD_ID(field, clazz, "DRM_EVENT", "I");
+ gEventWhat.kWhatDrmEvent = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "EXPIRATION_UPDATE", "I");
+ gEventWhat.kWhatExpirationUpdate = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "KEY_STATUS_CHANGE", "I");
+ gEventWhat.kWhatKeyStatusChange = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "KEY_TYPE_STREAMING", "I");
gKeyTypes.kKeyTypeStreaming = env->GetStaticIntField(clazz, field);
@@ -589,6 +627,14 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm$KeyRequest");
GET_FIELD_ID(gFields.keyRequest.data, clazz, "mData", "[B");
GET_FIELD_ID(gFields.keyRequest.defaultUrl, clazz, "mDefaultUrl", "Ljava/lang/String;");
+ GET_FIELD_ID(gFields.keyRequest.requestType, clazz, "mRequestType", "I");
+
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_INITIAL", "I");
+ gKeyRequestTypes.kKeyRequestTypeInitial = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_RENEWAL", "I");
+ gKeyRequestTypes.kKeyRequestTypeRenewal = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_RELEASE", "I");
+ gKeyRequestTypes.kKeyRequestTypeRelease = env->GetStaticIntField(clazz, field);
FIND_CLASS(clazz, "android/media/MediaDrm$ProvisionRequest");
GET_FIELD_ID(gFields.provisionRequest.data, clazz, "mData", "[B");
@@ -780,9 +826,10 @@ static jobject android_media_MediaDrm_getKeyRequest(
Vector<uint8_t> request;
String8 defaultUrl;
+ DrmPlugin::KeyRequestType keyRequestType;
status_t err = drm->getKeyRequest(sessionId, initData, mimeType,
- keyType, optParams, request, defaultUrl);
+ keyType, optParams, request, defaultUrl, &keyRequestType);
if (throwExceptionAsNecessary(env, err, "Failed to get key request")) {
return NULL;
@@ -801,6 +848,25 @@ static jobject android_media_MediaDrm_getKeyRequest(
jstring jdefaultUrl = env->NewStringUTF(defaultUrl.string());
env->SetObjectField(keyObj, gFields.keyRequest.defaultUrl, jdefaultUrl);
+
+ switch (keyRequestType) {
+ case DrmPlugin::kKeyRequestType_Initial:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeInitial);
+ break;
+ case DrmPlugin::kKeyRequestType_Renewal:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeRenewal);
+ break;
+ case DrmPlugin::kKeyRequestType_Release:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeRelease);
+ break;
+ default:
+ throwStateException(env, "DRM plugin failure: unknown key request type",
+ ERROR_DRM_UNKNOWN);
+ break;
+ }
}
return keyObj;
diff --git a/media/jni/android_media_MediaExtractor.cpp b/media/jni/android_media_MediaExtractor.cpp
index c0795b6..4e9b726 100644
--- a/media/jni/android_media_MediaExtractor.cpp
+++ b/media/jni/android_media_MediaExtractor.cpp
@@ -25,6 +25,7 @@
#include "android_runtime/Log.h"
#include "jni.h"
#include "JNIHelp.h"
+#include "android_media_MediaDataSource.h"
#include <media/IMediaHTTPService.h>
#include <media/hardware/CryptoAPI.h>
@@ -50,74 +51,6 @@ struct fields_t {
static fields_t gFields;
-class JavaDataSourceBridge : public DataSource {
- jmethodID mReadMethod;
- jmethodID mGetSizeMethod;
- jmethodID mCloseMethod;
- jobject mDataSource;
- public:
- JavaDataSourceBridge(JNIEnv *env, jobject source) {
- mDataSource = env->NewGlobalRef(source);
-
- jclass datasourceclass = env->GetObjectClass(mDataSource);
- CHECK(datasourceclass != NULL);
-
- mReadMethod = env->GetMethodID(datasourceclass, "readAt", "(J[BI)I");
- CHECK(mReadMethod != NULL);
-
- mGetSizeMethod = env->GetMethodID(datasourceclass, "getSize", "()J");
- CHECK(mGetSizeMethod != NULL);
-
- mCloseMethod = env->GetMethodID(datasourceclass, "close", "()V");
- CHECK(mCloseMethod != NULL);
- }
-
- ~JavaDataSourceBridge() {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
- env->CallVoidMethod(mDataSource, mCloseMethod);
- env->DeleteGlobalRef(mDataSource);
- }
-
- virtual status_t initCheck() const {
- return OK;
- }
-
- virtual ssize_t readAt(off64_t offset, void* buffer, size_t size) {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
-
- // XXX could optimize this by reusing the same array
- jbyteArray byteArrayObj = env->NewByteArray(size);
- env->DeleteLocalRef(env->GetObjectClass(mDataSource));
- env->DeleteLocalRef(env->GetObjectClass(byteArrayObj));
- ssize_t numread = env->CallIntMethod(mDataSource, mReadMethod, offset, byteArrayObj, (jint)size);
- env->GetByteArrayRegion(byteArrayObj, 0, size, (jbyte*) buffer);
- env->DeleteLocalRef(byteArrayObj);
- if (env->ExceptionCheck()) {
- ALOGW("Exception occurred while reading %zu at %lld", size, (long long)offset);
- LOGW_EX(env);
- env->ExceptionClear();
- return -1;
- }
- return numread;
- }
-
- virtual status_t getSize(off64_t *size) {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
-
- CHECK(size != NULL);
-
- int64_t len = env->CallLongMethod(mDataSource, mGetSizeMethod);
- if (len < 0) {
- *size = ERROR_UNSUPPORTED;
- } else {
- *size = len;
- }
- return OK;
- }
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
JMediaExtractor::JMediaExtractor(JNIEnv *env, jobject thiz)
: mClass(NULL),
mObject(NULL) {
@@ -777,10 +710,16 @@ static void android_media_MediaExtractor_setDataSourceCallback(
return;
}
- sp<JavaDataSourceBridge> bridge = new JavaDataSourceBridge(env, callbackObj);
+ sp<DataSource> bridge =
+ DataSource::CreateFromIDataSource(new JMediaDataSource(env, callbackObj));
status_t err = extractor->setDataSource(bridge);
if (err != OK) {
+ // Clear bridge so that JMediaDataSource::close() is called _before_
+ // we throw the IOException.
+ // Otherwise close() gets called when we go out of scope, it calls
+ // Java with a pending exception and crashes the process.
+ bridge.clear();
jniThrowException(
env,
"java/io/IOException",
@@ -881,7 +820,7 @@ static JNINativeMethod gMethods[] = {
{ "setDataSource", "(Ljava/io/FileDescriptor;JJ)V",
(void *)android_media_MediaExtractor_setDataSourceFd },
- { "setDataSource", "(Landroid/media/DataSource;)V",
+ { "setDataSource", "(Landroid/media/MediaDataSource;)V",
(void *)android_media_MediaExtractor_setDataSourceCallback },
{ "getCachedDuration", "()J",
diff --git a/media/jni/android_media_MediaHTTPConnection.cpp b/media/jni/android_media_MediaHTTPConnection.cpp
index 7226ef5..393003d 100644
--- a/media/jni/android_media_MediaHTTPConnection.cpp
+++ b/media/jni/android_media_MediaHTTPConnection.cpp
@@ -134,7 +134,6 @@ static jobject android_media_MediaHTTPConnection_native_getIMemory(
static jint android_media_MediaHTTPConnection_native_readAt(
JNIEnv *env, jobject thiz, jlong offset, jint size) {
sp<JMediaHTTPConnection> conn = getObject(env, thiz);
-
if (size > JMediaHTTPConnection::kBufferSize) {
size = JMediaHTTPConnection::kBufferSize;
}
diff --git a/media/jni/android_media_MediaMetadataRetriever.cpp b/media/jni/android_media_MediaMetadataRetriever.cpp
index fc7931e..59fb6d6 100644
--- a/media/jni/android_media_MediaMetadataRetriever.cpp
+++ b/media/jni/android_media_MediaMetadataRetriever.cpp
@@ -30,16 +30,17 @@
#include "jni.h"
#include "JNIHelp.h"
#include "android_runtime/AndroidRuntime.h"
+#include "android_media_MediaDataSource.h"
#include "android_media_Utils.h"
#include "android_util_Binder.h"
+#include "android/graphics/GraphicsJNI.h"
using namespace android;
struct fields_t {
jfieldID context;
jclass bitmapClazz; // Must be a global ref
- jfieldID nativeBitmap;
jmethodID createBitmapMethod;
jmethodID createScaledBitmapMethod;
jclass configClazz; // Must be a global ref
@@ -171,6 +172,23 @@ static void android_media_MediaMetadataRetriever_setDataSourceFD(JNIEnv *env, jo
process_media_retriever_call(env, retriever->setDataSource(fd, offset, length), "java/lang/RuntimeException", "setDataSource failed");
}
+// JNI hook for MediaMetadataRetriever._setDataSource(MediaDataSource).
+// Wraps the Java-side MediaDataSource in a native JMediaDataSource
+// (declared in android_media_MediaDataSource.h) and hands it to the native
+// retriever as an IDataSource.
+// Throws: IllegalStateException if the native retriever is gone,
+//         IllegalArgumentException on a null source,
+//         RuntimeException if the native setDataSource call fails.
+static void android_media_MediaMetadataRetriever_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject dataSource)
+{
+ ALOGV("setDataSourceCallback");
+ MediaMetadataRetriever* retriever = getRetriever(env, thiz);
+ if (retriever == 0) {
+ // Native retriever already released or never created.
+ jniThrowException(env, "java/lang/IllegalStateException", "No retriever available");
+ return;
+ }
+ if (dataSource == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ // JMediaDataSource bridges the Java object; see android_media_MediaDataSource.h.
+ sp<IDataSource> callbackDataSource = new JMediaDataSource(env, dataSource);
+ process_media_retriever_call(env, retriever->setDataSource(callbackDataSource), "java/lang/RuntimeException", "setDataSourceCallback failed");
+}
+
template<typename T>
static void rotate0(T* dst, const T* src, size_t width, size_t height)
{
@@ -254,7 +272,7 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtTime(JNIEnv *env,
jobject config = env->CallStaticObjectMethod(
fields.configClazz,
fields.createConfigMethod,
- SkBitmap::kRGB_565_Config);
+ GraphicsJNI::colorTypeToLegacyBitmapConfig(kRGB_565_SkColorType));
uint32_t width, height;
bool swapWidthAndHeight = false;
@@ -281,16 +299,16 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtTime(JNIEnv *env,
return NULL;
}
- SkBitmap *bitmap =
- (SkBitmap *) env->GetLongField(jBitmap, fields.nativeBitmap);
+ SkBitmap bitmap;
+ GraphicsJNI::getSkBitmap(env, jBitmap, &bitmap);
- bitmap->lockPixels();
- rotate((uint16_t*)bitmap->getPixels(),
+ bitmap.lockPixels();
+ rotate((uint16_t*)bitmap.getPixels(),
(uint16_t*)((char*)videoFrame + sizeof(VideoFrame)),
videoFrame->mWidth,
videoFrame->mHeight,
videoFrame->mRotationAngle);
- bitmap->unlockPixels();
+ bitmap.unlockPixels();
if (videoFrame->mDisplayWidth != videoFrame->mWidth ||
videoFrame->mDisplayHeight != videoFrame->mHeight) {
@@ -420,10 +438,6 @@ static void android_media_MediaMetadataRetriever_native_init(JNIEnv *env)
if (fields.createScaledBitmapMethod == NULL) {
return;
}
- fields.nativeBitmap = env->GetFieldID(fields.bitmapClazz, "mNativeBitmap", "J");
- if (fields.nativeBitmap == NULL) {
- return;
- }
jclass configClazz = env->FindClass("android/graphics/Bitmap$Config");
if (configClazz == NULL) {
@@ -461,6 +475,7 @@ static JNINativeMethod nativeMethods[] = {
},
{"setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaMetadataRetriever_setDataSourceFD},
+ {"_setDataSource", "(Landroid/media/MediaDataSource;)V", (void *)android_media_MediaMetadataRetriever_setDataSourceCallback},
{"_getFrameAtTime", "(JI)Landroid/graphics/Bitmap;", (void *)android_media_MediaMetadataRetriever_getFrameAtTime},
{"extractMetadata", "(I)Ljava/lang/String;", (void *)android_media_MediaMetadataRetriever_extractMetadata},
{"getEmbeddedPicture", "(I)[B", (void *)android_media_MediaMetadataRetriever_getEmbeddedPicture},
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 820de5b..9c67278 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -20,6 +20,7 @@
#include "utils/Log.h"
#include <media/mediaplayer.h>
+#include <media/AudioResamplerPublic.h>
#include <media/IMediaHTTPService.h>
#include <media/MediaPlayerInterface.h>
#include <stdio.h>
@@ -36,6 +37,9 @@
#include "utils/Errors.h" // for status_t
#include "utils/KeyedVector.h"
#include "utils/String8.h"
+#include "android_media_MediaDataSource.h"
+#include "android_media_PlaybackParams.h"
+#include "android_media_SyncParams.h"
#include "android_media_Utils.h"
#include "android_os_Parcel.h"
@@ -65,6 +69,9 @@ struct fields_t {
};
static fields_t fields;
+static PlaybackParams::fields_t gPlaybackParamsFields;
+static SyncParams::fields_t gSyncParamsFields;
+
static Mutex sLock;
// ----------------------------------------------------------------------------
@@ -167,6 +174,8 @@ static void process_media_player_call(JNIEnv *env, jobject thiz, status_t opStat
} else { // Throw exception!
if ( opStatus == (status_t) INVALID_OPERATION ) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ } else if ( opStatus == (status_t) BAD_VALUE ) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
} else if ( opStatus == (status_t) PERMISSION_DENIED ) {
jniThrowException(env, "java/lang/SecurityException", NULL);
} else if ( opStatus != (status_t) OK ) {
@@ -251,6 +260,23 @@ android_media_MediaPlayer_setDataSourceFD(JNIEnv *env, jobject thiz, jobject fil
process_media_player_call( env, thiz, mp->setDataSource(fd, offset, length), "java/io/IOException", "setDataSourceFD failed." );
}
+// JNI hook for MediaPlayer._setDataSource(MediaDataSource).
+// Wraps the Java MediaDataSource in a native JMediaDataSource (see
+// android_media_MediaDataSource.h) and forwards it to the native MediaPlayer.
+// Throws: IllegalStateException when the native player is gone,
+//         IllegalArgumentException on a null source; other native errors are
+//         mapped by process_media_player_call.
+static void
+android_media_MediaPlayer_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject dataSource)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL ) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ if (dataSource == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+ sp<IDataSource> callbackDataSource = new JMediaDataSource(env, dataSource);
+ process_media_player_call(env, thiz, mp->setDataSource(callbackDataSource), "java/lang/RuntimeException", "setDataSourceCallback failed." );
+}
+
static sp<IGraphicBufferProducer>
getVideoSurfaceTexture(JNIEnv* env, jobject thiz) {
IGraphicBufferProducer * const p = (IGraphicBufferProducer*)env->GetLongField(thiz, fields.surface_texture);
@@ -402,6 +428,155 @@ android_media_MediaPlayer_isPlaying(JNIEnv *env, jobject thiz)
}
static void
+android_media_MediaPlayer_setPlaybackParams(JNIEnv *env, jobject thiz, jobject params)
+{
+ // JNI hook for MediaPlayer.setPlaybackParams: read-modify-write of the
+ // native AudioPlaybackRate. Only the fields flagged "set" in the Java
+ // PlaybackParams are applied on top of the player's current settings,
+ // and the native setter is skipped entirely when nothing was set.
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ PlaybackParams pbp;
+ pbp.fillFromJobject(env, gPlaybackParamsFields, params);
+ ALOGV("setPlaybackParams: %d:%f %d:%f %d:%u %d:%u",
+ pbp.speedSet, pbp.audioRate.mSpeed,
+ pbp.pitchSet, pbp.audioRate.mPitch,
+ pbp.audioFallbackModeSet, pbp.audioRate.mFallbackMode,
+ pbp.audioStretchModeSet, pbp.audioRate.mStretchMode);
+
+ // Fetch current settings first so unset fields keep their existing values.
+ AudioPlaybackRate rate;
+ status_t err = mp->getPlaybackSettings(&rate);
+ if (err == OK) {
+ bool updatedRate = false;
+ if (pbp.speedSet) {
+ rate.mSpeed = pbp.audioRate.mSpeed;
+ updatedRate = true;
+ }
+ if (pbp.pitchSet) {
+ rate.mPitch = pbp.audioRate.mPitch;
+ updatedRate = true;
+ }
+ if (pbp.audioFallbackModeSet) {
+ rate.mFallbackMode = pbp.audioRate.mFallbackMode;
+ updatedRate = true;
+ }
+ if (pbp.audioStretchModeSet) {
+ rate.mStretchMode = pbp.audioRate.mStretchMode;
+ updatedRate = true;
+ }
+ if (updatedRate) {
+ err = mp->setPlaybackSettings(rate);
+ }
+ }
+ // Maps BAD_VALUE -> IllegalArgumentException, INVALID_OPERATION ->
+ // IllegalStateException (see process_media_player_call above).
+ process_media_player_call(
+ env, thiz, err,
+ "java/lang/IllegalStateException", "unexpected error");
+}
+
+// JNI hook for MediaPlayer.getPlaybackParams: reads the native
+// AudioPlaybackRate and returns it as a Java PlaybackParams with every
+// field marked as set.
+static jobject
+android_media_MediaPlayer_getPlaybackParams(JNIEnv *env, jobject thiz)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return NULL;
+ }
+
+ PlaybackParams pbp;
+ AudioPlaybackRate &audioRate = pbp.audioRate;
+ // NOTE(review): if getPlaybackSettings fails, process_media_player_call has
+ // already raised a Java exception, yet we still build and return a
+ // PlaybackParams object below — confirm the caller tolerates this.
+ process_media_player_call(
+ env, thiz, mp->getPlaybackSettings(&audioRate),
+ "java/lang/IllegalStateException", "unexpected error");
+ ALOGV("getPlaybackSettings: %f %f %d %d",
+ audioRate.mSpeed, audioRate.mPitch, audioRate.mFallbackMode, audioRate.mStretchMode);
+
+ pbp.speedSet = true;
+ pbp.pitchSet = true;
+ pbp.audioFallbackModeSet = true;
+ pbp.audioStretchModeSet = true;
+
+ return pbp.asJobject(env, gPlaybackParamsFields);
+}
+
+// JNI hook for MediaPlayer.setSyncParams: read-modify-write of the native
+// AVSyncSettings, mirroring setPlaybackParams above. Only fields flagged
+// "set" in the Java SyncParams are applied; a set frameRate alone also
+// forces the native setter to run (frameRate is passed separately, -1.f
+// meaning "not set").
+static void
+android_media_MediaPlayer_setSyncParams(JNIEnv *env, jobject thiz, jobject params)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ SyncParams scp;
+ scp.fillFromJobject(env, gSyncParamsFields, params);
+ ALOGV("setSyncParams: %d:%d %d:%d %d:%f %d:%f",
+ scp.syncSourceSet, scp.sync.mSource,
+ scp.audioAdjustModeSet, scp.sync.mAudioAdjustMode,
+ scp.toleranceSet, scp.sync.mTolerance,
+ scp.frameRateSet, scp.frameRate);
+
+ AVSyncSettings avsync;
+ float videoFrameRate;
+ status_t err = mp->getSyncSettings(&avsync, &videoFrameRate);
+ if (err == OK) {
+ bool updatedSync = scp.frameRateSet;
+ if (scp.syncSourceSet) {
+ avsync.mSource = scp.sync.mSource;
+ updatedSync = true;
+ }
+ if (scp.audioAdjustModeSet) {
+ avsync.mAudioAdjustMode = scp.sync.mAudioAdjustMode;
+ updatedSync = true;
+ }
+ if (scp.toleranceSet) {
+ avsync.mTolerance = scp.sync.mTolerance;
+ updatedSync = true;
+ }
+ if (updatedSync) {
+ err = mp->setSyncSettings(avsync, scp.frameRateSet ? scp.frameRate : -1.f);
+ }
+ }
+ process_media_player_call(
+ env, thiz, err,
+ "java/lang/IllegalStateException", "unexpected error");
+}
+
+// JNI hook for MediaPlayer.getSyncParams: reads the native AVSyncSettings
+// plus the video frame rate, range-checks the returned values, and returns
+// them as a Java SyncParams. frameRate is only marked "set" when the native
+// layer reported a non-negative rate.
+static jobject
+android_media_MediaPlayer_getSyncParams(JNIEnv *env, jobject thiz)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return NULL;
+ }
+
+ SyncParams scp;
+ scp.frameRate = -1.f;
+ // NOTE(review): on failure a Java exception is already pending here, but
+ // execution continues to the sanity check below — confirm this is intended.
+ process_media_player_call(
+ env, thiz, mp->getSyncSettings(&scp.sync, &scp.frameRate),
+ "java/lang/IllegalStateException", "unexpected error");
+
+ ALOGV("getSyncSettings: %d %d %f %f",
+ scp.sync.mSource, scp.sync.mAudioAdjustMode, scp.sync.mTolerance, scp.frameRate);
+
+ // sanity check params
+ if (scp.sync.mSource >= AVSYNC_SOURCE_MAX
+ || scp.sync.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
+ || scp.sync.mTolerance < 0.f
+ || scp.sync.mTolerance >= AVSYNC_TOLERANCE_MAX) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return NULL;
+ }
+
+ scp.syncSourceSet = true;
+ scp.audioAdjustModeSet = true;
+ scp.toleranceSet = true;
+ scp.frameRateSet = scp.frameRate >= 0.f;
+
+ return scp.asJobject(env, gSyncParamsFields);
+}
+
+static void
android_media_MediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec)
{
sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
@@ -667,6 +842,8 @@ android_media_MediaPlayer_native_init(JNIEnv *env)
return;
}
+ env->DeleteLocalRef(clazz);
+
clazz = env->FindClass("android/net/ProxyInfo");
if (clazz == NULL) {
return;
@@ -680,6 +857,11 @@ android_media_MediaPlayer_native_init(JNIEnv *env)
fields.proxyConfigGetExclusionList =
env->GetMethodID(clazz, "getExclusionListAsString", "()Ljava/lang/String;");
+
+ env->DeleteLocalRef(clazz);
+
+ gPlaybackParamsFields.init(env);
+ gSyncParamsFields.init(env);
}
static void
@@ -859,14 +1041,19 @@ static JNINativeMethod gMethods[] = {
(void *)android_media_MediaPlayer_setDataSourceAndHeaders
},
- {"_setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaPlayer_setDataSourceFD},
+ {"_setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaPlayer_setDataSourceFD},
+ {"_setDataSource", "(Landroid/media/MediaDataSource;)V",(void *)android_media_MediaPlayer_setDataSourceCallback },
{"_setVideoSurface", "(Landroid/view/Surface;)V", (void *)android_media_MediaPlayer_setVideoSurface},
{"_prepare", "()V", (void *)android_media_MediaPlayer_prepare},
- {"prepareAsync", "()V", (void *)android_media_MediaPlayer_prepareAsync},
+ {"_prepareAsync", "()V", (void *)android_media_MediaPlayer_prepareAsync},
{"_start", "()V", (void *)android_media_MediaPlayer_start},
{"_stop", "()V", (void *)android_media_MediaPlayer_stop},
{"getVideoWidth", "()I", (void *)android_media_MediaPlayer_getVideoWidth},
{"getVideoHeight", "()I", (void *)android_media_MediaPlayer_getVideoHeight},
+ {"setPlaybackParams", "(Landroid/media/PlaybackParams;)V", (void *)android_media_MediaPlayer_setPlaybackParams},
+ {"getPlaybackParams", "()Landroid/media/PlaybackParams;", (void *)android_media_MediaPlayer_getPlaybackParams},
+ {"setSyncParams", "(Landroid/media/SyncParams;)V", (void *)android_media_MediaPlayer_setSyncParams},
+ {"getSyncParams", "()Landroid/media/SyncParams;", (void *)android_media_MediaPlayer_getSyncParams},
{"seekTo", "(I)V", (void *)android_media_MediaPlayer_seekTo},
{"_pause", "()V", (void *)android_media_MediaPlayer_pause},
{"isPlaying", "()Z", (void *)android_media_MediaPlayer_isPlaying},
@@ -901,8 +1088,8 @@ static int register_android_media_MediaPlayer(JNIEnv *env)
return AndroidRuntime::registerNativeMethods(env,
"android/media/MediaPlayer", gMethods, NELEM(gMethods));
}
-
extern int register_android_media_ImageReader(JNIEnv *env);
+extern int register_android_media_ImageWriter(JNIEnv *env);
extern int register_android_media_Crypto(JNIEnv *env);
extern int register_android_media_Drm(JNIEnv *env);
extern int register_android_media_MediaCodec(JNIEnv *env);
@@ -913,6 +1100,7 @@ extern int register_android_media_MediaMetadataRetriever(JNIEnv *env);
extern int register_android_media_MediaMuxer(JNIEnv *env);
extern int register_android_media_MediaRecorder(JNIEnv *env);
extern int register_android_media_MediaScanner(JNIEnv *env);
+extern int register_android_media_MediaSync(JNIEnv *env);
extern int register_android_media_ResampleInputStream(JNIEnv *env);
extern int register_android_media_MediaProfiles(JNIEnv *env);
extern int register_android_media_AmrInputStream(JNIEnv *env);
@@ -931,6 +1119,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
}
assert(env != NULL);
+ if (register_android_media_ImageWriter(env) != JNI_OK) {
+ ALOGE("ERROR: ImageWriter native registration failed");
+ goto bail;
+ }
+
if (register_android_media_ImageReader(env) < 0) {
ALOGE("ERROR: ImageReader native registration failed");
goto bail;
@@ -991,6 +1184,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
goto bail;
}
+ if (register_android_media_MediaSync(env) < 0) {
+ ALOGE("ERROR: MediaSync native registration failed");
+ goto bail;
+ }
+
if (register_android_media_MediaExtractor(env) < 0) {
ALOGE("ERROR: MediaCodec native registration failed");
goto bail;
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index 8b7d40d..f60af63 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -29,8 +29,11 @@
#include <camera/ICameraService.h>
#include <camera/Camera.h>
#include <media/mediarecorder.h>
+#include <media/stagefright/PersistentSurface.h>
#include <utils/threads.h>
+#include <ScopedUtfChars.h>
+
#include "jni.h"
#include "JNIHelp.h"
#include "android_runtime/AndroidRuntime.h"
@@ -46,6 +49,8 @@ using namespace android;
// helper function to extract a native Camera object from a Camera Java object
extern sp<Camera> get_native_camera(JNIEnv *env, jobject thiz, struct JNICameraContext** context);
+extern sp<PersistentSurface>
+android_media_MediaCodec_getPersistentInputSurface(JNIEnv* env, jobject object);
struct fields_t {
jfieldID context;
@@ -113,6 +118,12 @@ static sp<Surface> get_surface(JNIEnv* env, jobject clazz)
return android_view_Surface_getSurface(env, clazz);
}
+// Unwraps the native PersistentSurface from a Java object via the
+// MediaCodec JNI helper declared above (android_media_MediaCodec_getPersistentInputSurface).
+static sp<PersistentSurface> get_persistentSurface(JNIEnv* env, jobject object)
+{
+ ALOGV("get_persistentSurface");
+ return android_media_MediaCodec_getPersistentInputSurface(env, object);
+}
+
// Returns true if it throws an exception.
static bool process_media_recorder_call(JNIEnv *env, status_t opStatus, const char* exception, const char* message)
{
@@ -444,11 +455,13 @@ android_media_MediaRecorder_native_init(JNIEnv *env)
static void
android_media_MediaRecorder_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
- jstring packageName)
+ jstring packageName, jstring opPackageName)
{
ALOGV("setup");
- sp<MediaRecorder> mr = new MediaRecorder();
+ ScopedUtfChars opPackageNameStr(env, opPackageName);
+
+ sp<MediaRecorder> mr = new MediaRecorder(String16(opPackageNameStr.c_str()));
if (mr == NULL) {
jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
return;
@@ -483,6 +496,18 @@ android_media_MediaRecorder_native_finalize(JNIEnv *env, jobject thiz)
android_media_MediaRecorder_release(env, thiz);
}
+// JNI hook for MediaRecorder.native_setInputSurface(Surface): unwraps the
+// persistent input surface (created by MediaCodec) and forwards it to the
+// native recorder. Throws IllegalStateException if the native recorder is
+// gone, IllegalArgumentException if the native call rejects the surface.
+void android_media_MediaRecorder_setInputSurface(
+        JNIEnv* env, jobject thiz, jobject object) {
+    ALOGV("android_media_MediaRecorder_setInputSurface");
+
+    sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
+    if (mr == NULL) {
+        // Fix: the original dereferenced mr unchecked; calling this after
+        // release() would crash the runtime instead of throwing.
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    sp<PersistentSurface> persistentSurface = get_persistentSurface(env, object);
+
+    process_media_recorder_call(env, mr->setInputSurface(persistentSurface),
+            "java/lang/IllegalArgumentException", "native_setInputSurface failed.");
+}
+
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
@@ -506,8 +531,10 @@ static JNINativeMethod gMethods[] = {
{"native_reset", "()V", (void *)android_media_MediaRecorder_native_reset},
{"release", "()V", (void *)android_media_MediaRecorder_release},
{"native_init", "()V", (void *)android_media_MediaRecorder_native_init},
- {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;)V", (void *)android_media_MediaRecorder_native_setup},
+ {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;)V",
+ (void *)android_media_MediaRecorder_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaRecorder_native_finalize},
+ {"native_setInputSurface", "(Landroid/view/Surface;)V", (void *)android_media_MediaRecorder_setInputSurface },
};
// This function only registers the native methods, and is called from
diff --git a/media/jni/android_media_MediaSync.cpp b/media/jni/android_media_MediaSync.cpp
new file mode 100644
index 0000000..8e0ed64
--- /dev/null
+++ b/media/jni/android_media_MediaSync.cpp
@@ -0,0 +1,551 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSync-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaSync.h"
+
+#include "android_media_AudioTrack.h"
+#include "android_media_PlaybackParams.h"
+#include "android_media_SyncParams.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/android_view_Surface.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <gui/Surface.h>
+
+#include <media/AudioResamplerPublic.h>
+#include <media/AudioTrack.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <nativehelper/ScopedLocalRef.h>
+
+namespace android {
+
+struct fields_t {
+ jfieldID context;
+ jfieldID mediaTimestampMediaTimeUsID;
+ jfieldID mediaTimestampNanoTimeID;
+ jfieldID mediaTimestampClockRateID;
+};
+
+static fields_t gFields;
+static PlaybackParams::fields_t gPlaybackParamsFields;
+static SyncParams::fields_t gSyncParamsFields;
+
+////////////////////////////////////////////////////////////////////////////////
+
+// JMediaSync: thin JNI-side wrapper around the native MediaSync created in
+// the constructor (MediaSync::create()). Every method below delegates 1:1 to
+// mSync with no added logic; see media/stagefright/MediaSync.h for semantics.
+JMediaSync::JMediaSync() {
+ mSync = MediaSync::create();
+}
+
+JMediaSync::~JMediaSync() {
+}
+
+status_t JMediaSync::setSurface(const sp<IGraphicBufferProducer> &bufferProducer) {
+ return mSync->setSurface(bufferProducer);
+}
+
+status_t JMediaSync::setAudioTrack(const sp<AudioTrack> &audioTrack) {
+ return mSync->setAudioTrack(audioTrack);
+}
+
+status_t JMediaSync::createInputSurface(
+ sp<IGraphicBufferProducer>* bufferProducer) {
+ return mSync->createInputSurface(bufferProducer);
+}
+
+sp<const MediaClock> JMediaSync::getMediaClock() {
+ return mSync->getMediaClock();
+}
+
+status_t JMediaSync::setPlaybackParams(const AudioPlaybackRate& rate) {
+ return mSync->setPlaybackSettings(rate);
+}
+
+void JMediaSync::getPlaybackParams(AudioPlaybackRate* rate /* nonnull */) {
+ mSync->getPlaybackSettings(rate);
+}
+
+status_t JMediaSync::setSyncParams(const AVSyncSettings& syncParams) {
+ return mSync->setSyncSettings(syncParams);
+}
+
+void JMediaSync::getSyncParams(AVSyncSettings* syncParams /* nonnull */) {
+ mSync->getSyncSettings(syncParams);
+}
+
+status_t JMediaSync::setVideoFrameRateHint(float rate) {
+ return mSync->setVideoFrameRateHint(rate);
+}
+
+float JMediaSync::getVideoFrameRate() {
+ return mSync->getVideoFrameRate();
+}
+
+status_t JMediaSync::updateQueuedAudioData(
+ int sizeInBytes, int64_t presentationTimeUs) {
+ return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
+}
+
+status_t JMediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) {
+ return mSync->getPlayTimeForPendingAudioFrames(outTimeUs);
+}
+
+} // namespace android
+
+////////////////////////////////////////////////////////////////////////////////
+
+using namespace android;
+
+// Swaps the JMediaSync stored in the Java object's long context field.
+// The pointer lives in a plain jlong, outside sp<> management, so the
+// strong count is adjusted manually: incStrong on the incoming object
+// before decStrong on the old one. Returns the previous value (its strong
+// count now held by the returned sp).
+static sp<JMediaSync> setMediaSync(JNIEnv *env, jobject thiz, const sp<JMediaSync> &sync) {
+ sp<JMediaSync> old = (JMediaSync *)env->GetLongField(thiz, gFields.context);
+ if (sync != NULL) {
+ sync->incStrong(thiz);
+ }
+ if (old != NULL) {
+ old->decStrong(thiz);
+ }
+
+ env->SetLongField(thiz, gFields.context, (jlong)sync.get());
+
+ return old;
+}
+
+// Re-wraps the raw pointer from the Java context field in an sp<>.
+static sp<JMediaSync> getMediaSync(JNIEnv *env, jobject thiz) {
+ return (JMediaSync *)env->GetLongField(thiz, gFields.context);
+}
+
+// Drops the native reference held by the Java object (context -> NULL).
+static void android_media_MediaSync_release(JNIEnv *env, jobject thiz) {
+ setMediaSync(env, thiz, NULL);
+}
+
+// Maps a native status_t to a Java exception: NO_ERROR throws nothing,
+// BAD_VALUE -> IllegalArgumentException, everything else (including NO_INIT
+// and INVALID_OPERATION) -> IllegalStateException with the numeric code
+// appended to msg. Positive values are treated as non-errors and ignored.
+static void throwExceptionAsNecessary(
+ JNIEnv *env, status_t err, const char *msg = NULL) {
+ switch (err) {
+ case NO_ERROR:
+ break;
+
+ case BAD_VALUE:
+ jniThrowException(env, "java/lang/IllegalArgumentException", msg);
+ break;
+
+ case NO_INIT:
+ case INVALID_OPERATION:
+ default:
+ // Positive status values are informational, not failures.
+ if (err > 0) {
+ break;
+ }
+ AString msgWithErrorCode(msg);
+ msgWithErrorCode.append(" error:");
+ msgWithErrorCode.append(err);
+ jniThrowException(env, "java/lang/IllegalStateException", msgWithErrorCode.c_str());
+ break;
+ }
+}
+
+// JNI hook for MediaSync.native_setSurface: extracts the
+// IGraphicBufferProducer from the Java Surface (a NULL surface clears the
+// output) and hands it to the native MediaSync. Errors are mapped to Java
+// exceptions via throwExceptionAsNecessary.
+static void android_media_MediaSync_native_setSurface(
+        JNIEnv *env, jobject thiz, jobject jsurface) {
+    ALOGV("android_media_MediaSync_setSurface");
+
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sp<IGraphicBufferProducer> bufferProducer;
+    if (jsurface != NULL) {
+        sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+        if (surface != NULL) {
+            bufferProducer = surface->getIGraphicBufferProducer();
+        } else {
+            throwExceptionAsNecessary(env, BAD_VALUE, "The surface has been released");
+            return;
+        }
+    }
+
+    status_t err = sync->setSurface(bufferProducer);
+
+    if (err == INVALID_OPERATION) {
+        throwExceptionAsNecessary(
+                env, INVALID_OPERATION, "Surface has already been configured");
+    } else if (err != NO_ERROR) {
+        // Fix: the original wrote "} if", dropping the else, so this generic
+        // path also ran — throwing a second exception while one was pending —
+        // after the INVALID_OPERATION throw above.
+        AString msg("Failed to connect to surface with error ");
+        msg.append(err);
+        throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
+    }
+}
+
+// JNI hook for MediaSync.native_setAudioTrack: unwraps the native AudioTrack
+// backing the Java AudioTrack (a NULL track clears the audio output) and
+// hands it to the native MediaSync. Errors are mapped to Java exceptions
+// via throwExceptionAsNecessary.
+static void android_media_MediaSync_native_setAudioTrack(
+        JNIEnv *env, jobject thiz, jobject jaudioTrack) {
+    ALOGV("android_media_MediaSync_setAudioTrack");
+
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sp<AudioTrack> audioTrack;
+    if (jaudioTrack != NULL) {
+        audioTrack = android_media_AudioTrack_getAudioTrack(env, jaudioTrack);
+        if (audioTrack == NULL) {
+            throwExceptionAsNecessary(env, BAD_VALUE, "The audio track has been released");
+            return;
+        }
+    }
+
+    status_t err = sync->setAudioTrack(audioTrack);
+
+    if (err == INVALID_OPERATION) {
+        throwExceptionAsNecessary(
+                env, INVALID_OPERATION, "Audio track has already been configured");
+    } else if (err != NO_ERROR) {
+        // Fix: the original wrote "} if", dropping the else, so this generic
+        // path also ran — throwing a second exception while one was pending —
+        // after the INVALID_OPERATION throw above.
+        AString msg("Failed to configure audio track with error ");
+        msg.append(err);
+        throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
+    }
+}
+
+static jobject android_media_MediaSync_createInputSurface(
+        JNIEnv* env, jobject thiz) {  // creates a Surface that feeds video buffers into this MediaSync
+    ALOGV("android_media_MediaSync_createInputSurface");
+
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    // Tell the MediaSync that we want to use a Surface as input.
+    sp<IGraphicBufferProducer> bufferProducer;
+    status_t err = sync->createInputSurface(&bufferProducer);
+    if (err != NO_ERROR) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    // Wrap the IGBP in a Java-language Surface.
+    return android_view_Surface_createFromIGraphicBufferProducer(env,
+            bufferProducer);
+}
+
+static void android_media_MediaSync_native_updateQueuedAudioData(
+        JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) {  // reports audio just written to the track
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
+    if (err != NO_ERROR) {  // error code is mapped to a Java exception by the helper
+        throwExceptionAsNecessary(env, err);
+        return;
+    }
+}
+
+static jboolean android_media_MediaSync_native_getTimestamp(
+        JNIEnv *env, jobject thiz, jobject timestamp) {  // fills a MediaTimestamp; JNI_FALSE when no timestamp is available
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return JNI_FALSE;
+    }
+
+    sp<const MediaClock> mediaClock = sync->getMediaClock();
+    if (mediaClock == NULL) {  // no media clock yet: nothing to report
+        return JNI_FALSE;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t mediaUs = 0;
+    if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
+        return JNI_FALSE;
+    }
+
+    env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID,
+            (jlong)mediaUs);
+    env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID,
+            (jlong)(nowUs * 1000));  // microseconds -> nanoseconds
+    env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID,
+            (jfloat)mediaClock->getPlaybackRate());
+    return JNI_TRUE;
+}
+
+static jlong android_media_MediaSync_native_getPlayTimeForPendingAudioFrames(
+        JNIEnv *env, jobject thiz) {  // returns the play time (us) for pending audio frames
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // released: throw and bail out instead of dereferencing NULL below
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return 0;
+    }
+    int64_t playTimeUs = 0;
+    status_t err = sync->getPlayTimeForPendingAudioFrames(&playTimeUs);
+    if (err != NO_ERROR) {  // exception pending; 0 is returned but ignored by the caller
+        throwExceptionAsNecessary(env, err);
+    }
+    return (jlong)playTimeUs;
+}
+
+static jfloat android_media_MediaSync_setPlaybackParams(
+        JNIEnv *env, jobject thiz, jobject params) {  // applies a PlaybackParams object; returns the resulting clock rate
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return (jfloat)0.f;
+    }
+
+    PlaybackParams pbs;
+    pbs.fillFromJobject(env, gPlaybackParamsFields, params);
+    ALOGV("setPlaybackParams: %d:%f %d:%f %d:%u %d:%u",
+            pbs.speedSet, pbs.audioRate.mSpeed,
+            pbs.pitchSet, pbs.audioRate.mPitch,
+            pbs.audioFallbackModeSet, pbs.audioRate.mFallbackMode,
+            pbs.audioStretchModeSet, pbs.audioRate.mStretchMode);
+
+    AudioPlaybackRate rate;
+    sync->getPlaybackParams(&rate);  // start from current settings; overlay only the fields the caller set
+    bool updatedRate = false;
+    if (pbs.speedSet) {
+        rate.mSpeed = pbs.audioRate.mSpeed;
+        updatedRate = true;
+    }
+    if (pbs.pitchSet) {
+        rate.mPitch = pbs.audioRate.mPitch;
+        updatedRate = true;
+    }
+    if (pbs.audioFallbackModeSet) {
+        rate.mFallbackMode = pbs.audioRate.mFallbackMode;
+        updatedRate = true;
+    }
+    if (pbs.audioStretchModeSet) {
+        rate.mStretchMode = pbs.audioRate.mStretchMode;
+        updatedRate = true;
+    }
+    if (updatedRate) {  // only touch the native side when something actually changed
+        status_t err = sync->setPlaybackParams(rate);
+        if (err != OK) {
+            throwExceptionAsNecessary(env, err);
+            return (jfloat)0.f;
+        }
+    }
+
+    sp<const MediaClock> mediaClock = sync->getMediaClock();
+    if (mediaClock == NULL) {  // no clock yet: report rate 0
+        return (jfloat)0.f;
+    }
+
+    return (jfloat)mediaClock->getPlaybackRate();
+}
+
+static jobject android_media_MediaSync_getPlaybackParams(
+        JNIEnv *env, jobject thiz) {  // returns current settings as an android.media.PlaybackParams
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    PlaybackParams pbs;
+    AudioPlaybackRate &audioRate = pbs.audioRate;
+    sync->getPlaybackParams(&audioRate);
+    ALOGV("getPlaybackParams: %f %f %d %d",
+            audioRate.mSpeed, audioRate.mPitch, audioRate.mFallbackMode, audioRate.mStretchMode);
+
+    pbs.speedSet = true;  // all fields are valid when read back from the native side
+    pbs.pitchSet = true;
+    pbs.audioFallbackModeSet = true;
+    pbs.audioStretchModeSet = true;
+
+    return pbs.asJobject(env, gPlaybackParamsFields);
+}
+
+static jfloat android_media_MediaSync_setSyncParams(
+        JNIEnv *env, jobject thiz, jobject params) {  // applies a SyncParams object; returns the resulting clock rate
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return (jfloat)0.f;
+    }
+
+    SyncParams scs;
+    scs.fillFromJobject(env, gSyncParamsFields, params);
+    ALOGV("setSyncParams: %d:%d %d:%d %d:%f %d:%f",
+            scs.syncSourceSet, scs.sync.mSource,
+            scs.audioAdjustModeSet, scs.sync.mAudioAdjustMode,
+            scs.toleranceSet, scs.sync.mTolerance,
+            scs.frameRateSet, scs.frameRate);
+
+    AVSyncSettings avsync;
+    sync->getSyncParams(&avsync);  // start from current settings; overlay only the fields the caller set
+    bool updatedSync = false;
+    status_t err = OK;
+    if (scs.syncSourceSet) {
+        avsync.mSource = scs.sync.mSource;
+        updatedSync = true;
+    }
+    if (scs.audioAdjustModeSet) {
+        avsync.mAudioAdjustMode = scs.sync.mAudioAdjustMode;
+        updatedSync = true;
+    }
+    if (scs.toleranceSet) {
+        avsync.mTolerance = scs.sync.mTolerance;
+        updatedSync = true;
+    }
+    if (updatedSync) {
+        err = sync->setSyncParams(avsync);
+    }
+
+    if (scs.frameRateSet && err == OK) {  // frame-rate hint is applied separately from AVSyncSettings
+        err = sync->setVideoFrameRateHint(scs.frameRate);
+    }
+    if (err != OK) {
+        throwExceptionAsNecessary(env, err);
+        return (jfloat)0.f;
+    }
+
+    sp<const MediaClock> mediaClock = sync->getMediaClock();
+    if (mediaClock == NULL) {  // no clock yet: report rate 0
+        return (jfloat)0.f;
+    }
+
+    return (jfloat)mediaClock->getPlaybackRate();
+}
+
+static jobject android_media_MediaSync_getSyncParams(JNIEnv *env, jobject thiz) {  // returns current settings as an android.media.SyncParams
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {  // native peer already released
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    SyncParams scs;
+    sync->getSyncParams(&scs.sync);
+    scs.frameRate = sync->getVideoFrameRate();
+
+    ALOGV("getSyncParams: %d %d %f %f",
+            scs.sync.mSource, scs.sync.mAudioAdjustMode, scs.sync.mTolerance, scs.frameRate);
+
+    // sanity check params
+    if (scs.sync.mSource >= AVSYNC_SOURCE_MAX
+            || scs.sync.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
+            || scs.sync.mTolerance < 0.f
+            || scs.sync.mTolerance >= AVSYNC_TOLERANCE_MAX) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    scs.syncSourceSet = true;
+    scs.audioAdjustModeSet = true;
+    scs.toleranceSet = true;
+    scs.frameRateSet = scs.frameRate >= 0.f;  // a negative frame rate means no hint is available
+
+    return scs.asJobject(env, gSyncParamsFields);
+}
+
+static void android_media_MediaSync_native_init(JNIEnv *env) {  // caches JNI field IDs used throughout this file
+    ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync"));
+    CHECK(clazz.get() != NULL);
+
+    gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J");
+    CHECK(gFields.context != NULL);
+
+    clazz.reset(env->FindClass("android/media/MediaTimestamp"));
+    CHECK(clazz.get() != NULL);
+
+    gFields.mediaTimestampMediaTimeUsID =
+            env->GetFieldID(clazz.get(), "mediaTimeUs", "J");
+    CHECK(gFields.mediaTimestampMediaTimeUsID != NULL);
+
+    gFields.mediaTimestampNanoTimeID =
+            env->GetFieldID(clazz.get(), "nanoTime", "J");
+    CHECK(gFields.mediaTimestampNanoTimeID != NULL);
+
+    gFields.mediaTimestampClockRateID =
+            env->GetFieldID(clazz.get(), "clockRate", "F");
+    CHECK(gFields.mediaTimestampClockRateID != NULL);
+
+    gSyncParamsFields.init(env);  // also cache SyncParams / PlaybackParams field IDs
+    gPlaybackParamsFields.init(env);
+}
+
+static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) {  // creates the native peer for a new Java MediaSync
+    sp<JMediaSync> sync = new JMediaSync();
+
+    setMediaSync(env, thiz, sync);  // stores the peer in mNativeContext
+}
+
+static void android_media_MediaSync_native_finalize(JNIEnv *env, jobject thiz) {  // finalizer: releases the native peer
+    android_media_MediaSync_release(env, thiz);
+}
+
+static JNINativeMethod gMethods[] = {  // maps MediaSync Java native methods to the C implementations above
+    { "native_setSurface",
+      "(Landroid/view/Surface;)V",
+      (void *)android_media_MediaSync_native_setSurface },
+
+    { "native_setAudioTrack",
+      "(Landroid/media/AudioTrack;)V",
+      (void *)android_media_MediaSync_native_setAudioTrack },
+
+    { "createInputSurface", "()Landroid/view/Surface;",
+      (void *)android_media_MediaSync_createInputSurface },
+
+    { "native_updateQueuedAudioData",
+      "(IJ)V",
+      (void *)android_media_MediaSync_native_updateQueuedAudioData },
+
+    { "native_getTimestamp",
+      "(Landroid/media/MediaTimestamp;)Z",
+      (void *)android_media_MediaSync_native_getTimestamp },
+
+    { "native_getPlayTimeForPendingAudioFrames",
+      "()J",
+      (void *)android_media_MediaSync_native_getPlayTimeForPendingAudioFrames },
+
+    { "native_init", "()V", (void *)android_media_MediaSync_native_init },
+
+    { "native_setup", "()V", (void *)android_media_MediaSync_native_setup },
+
+    { "native_release", "()V", (void *)android_media_MediaSync_release },
+
+    { "native_setPlaybackParams", "(Landroid/media/PlaybackParams;)F",
+      (void *)android_media_MediaSync_setPlaybackParams },
+
+    { "getPlaybackParams", "()Landroid/media/PlaybackParams;",
+      (void *)android_media_MediaSync_getPlaybackParams },
+
+    { "native_setSyncParams", "(Landroid/media/SyncParams;)F",
+      (void *)android_media_MediaSync_setSyncParams },
+
+    { "getSyncParams", "()Landroid/media/SyncParams;",
+      (void *)android_media_MediaSync_getSyncParams },
+
+    { "native_finalize", "()V", (void *)android_media_MediaSync_native_finalize },
+};
+
+int register_android_media_MediaSync(JNIEnv *env) {  // called at library load time to hook up gMethods
+    return AndroidRuntime::registerNativeMethods(
+            env, "android/media/MediaSync", gMethods, NELEM(gMethods));
+}
diff --git a/media/jni/android_media_MediaSync.h b/media/jni/android_media_MediaSync.h
new file mode 100644
index 0000000..22c77c7
--- /dev/null
+++ b/media/jni/android_media_MediaSync.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIASYNC_H_
+#define _ANDROID_MEDIA_MEDIASYNC_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/MediaSync.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct AudioPlaybackRate;
+class AudioTrack;
+class IGraphicBufferProducer;
+struct MediaClock;
+class MediaSync;
+
+struct JMediaSync : public RefBase {  // native peer of android.media.MediaSync; thin forwarder to MediaSync
+    JMediaSync();
+
+    status_t setSurface(const sp<IGraphicBufferProducer> &bufferProducer);
+    status_t setAudioTrack(const sp<AudioTrack> &audioTrack);
+
+    status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
+
+    status_t updateQueuedAudioData(int sizeInBytes, int64_t presentationTimeUs);
+
+    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);
+
+    status_t setPlaybackParams(const AudioPlaybackRate& rate);
+    void getPlaybackParams(AudioPlaybackRate* rate /* nonnull */);
+    status_t setSyncParams(const AVSyncSettings& syncParams);
+    void getSyncParams(AVSyncSettings* syncParams /* nonnull */);
+    status_t setVideoFrameRateHint(float rate);
+    float getVideoFrameRate();
+
+    sp<const MediaClock> getMediaClock();
+
+protected:
+    virtual ~JMediaSync();  // destroyed via RefBase when the last strong ref drops
+
+private:
+    sp<MediaSync> mSync;  // the wrapped stagefright MediaSync instance
+
+    DISALLOW_EVIL_CONSTRUCTORS(JMediaSync);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIASYNC_H_
diff --git a/media/jni/android_media_PlaybackParams.h b/media/jni/android_media_PlaybackParams.h
new file mode 100644
index 0000000..5bf13e9
--- /dev/null
+++ b/media/jni/android_media_PlaybackParams.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_PLAYBACK_PARAMS_H_
+#define _ANDROID_MEDIA_PLAYBACK_PARAMS_H_
+
+#include <media/AudioResamplerPublic.h>
+
+namespace android {
+
+// This entire class is inline as it is used from both core and media
+// This entire class is inline as it is used from both core and media
+struct PlaybackParams {  // C++ mirror of android.media.PlaybackParams plus per-field "set" flags
+    AudioPlaybackRate audioRate;  // speed / pitch / fallback-mode / stretch-mode values
+    bool speedSet;
+    bool pitchSet;
+    bool audioFallbackModeSet;
+    bool audioStretchModeSet;
+
+    struct fields_t {  // cached JNI IDs and SET_* bit values for android.media.PlaybackParams
+        jclass clazz;
+        jmethodID constructID;
+
+        jfieldID speed;
+        jfieldID pitch;
+        jfieldID audio_fallback_mode;
+        jfieldID audio_stretch_mode;
+        jfieldID set;
+        jint set_speed;
+        jint set_pitch;
+        jint set_audio_fallback_mode;
+        jint set_audio_stretch_mode;
+
+        void init(JNIEnv *env) {  // resolves and caches all IDs; silently no-ops if the class is missing
+            jclass lclazz = env->FindClass("android/media/PlaybackParams");
+            if (lclazz == NULL) {
+                return;
+            }
+
+            clazz = (jclass)env->NewGlobalRef(lclazz);
+            if (clazz == NULL) {
+                return;
+            }
+
+            constructID = env->GetMethodID(clazz, "<init>", "()V");
+
+            speed = env->GetFieldID(clazz, "mSpeed", "F");
+            pitch = env->GetFieldID(clazz, "mPitch", "F");
+            audio_fallback_mode = env->GetFieldID(clazz, "mAudioFallbackMode", "I");
+            audio_stretch_mode = env->GetFieldID(clazz, "mAudioStretchMode", "I");
+            set = env->GetFieldID(clazz, "mSet", "I");
+
+            set_speed =
+                env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_SPEED", "I"));
+            set_pitch =
+                env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_PITCH", "I"));
+            set_audio_fallback_mode = env->GetStaticIntField(
+                    clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_FALLBACK_MODE", "I"));
+            set_audio_stretch_mode = env->GetStaticIntField(
+                    clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_STRETCH_MODE", "I"));
+
+            env->DeleteLocalRef(lclazz);
+        }
+
+        void exit(JNIEnv *env) {  // releases the global class reference
+            env->DeleteGlobalRef(clazz);
+            clazz = NULL;
+        }
+    };
+
+    void fillFromJobject(JNIEnv *env, const fields_t& fields, jobject params) {  // reads a Java PlaybackParams into this struct
+        audioRate.mSpeed = env->GetFloatField(params, fields.speed);
+        audioRate.mPitch = env->GetFloatField(params, fields.pitch);
+        audioRate.mFallbackMode =
+            (AudioTimestretchFallbackMode)env->GetIntField(params, fields.audio_fallback_mode);
+        audioRate.mStretchMode =
+            (AudioTimestretchStretchMode)env->GetIntField(params, fields.audio_stretch_mode);
+        int set = env->GetIntField(params, fields.set);  // bitmask of which fields the caller set
+
+        speedSet = set & fields.set_speed;
+        pitchSet = set & fields.set_pitch;
+        audioFallbackModeSet = set & fields.set_audio_fallback_mode;
+        audioStretchModeSet = set & fields.set_audio_stretch_mode;
+    }
+
+    jobject asJobject(JNIEnv *env, const fields_t& fields) {  // builds a new Java PlaybackParams from this struct
+        jobject params = env->NewObject(fields.clazz, fields.constructID);
+        if (params == NULL) {
+            return NULL;
+        }
+        env->SetFloatField(params, fields.speed, (jfloat)audioRate.mSpeed);
+        env->SetFloatField(params, fields.pitch, (jfloat)audioRate.mPitch);
+        env->SetIntField(params, fields.audio_fallback_mode, (jint)audioRate.mFallbackMode);
+        env->SetIntField(params, fields.audio_stretch_mode, (jint)audioRate.mStretchMode);
+        env->SetIntField(
+                params, fields.set,
+                (speedSet ? fields.set_speed : 0)
+                        | (pitchSet ? fields.set_pitch : 0)
+                        | (audioFallbackModeSet ? fields.set_audio_fallback_mode : 0)
+                        | (audioStretchModeSet ? fields.set_audio_stretch_mode : 0));
+
+        return params;
+    }
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_PLAYBACK_PARAMS_H_
diff --git a/media/jni/android_media_SyncParams.cpp b/media/jni/android_media_SyncParams.cpp
new file mode 100644
index 0000000..d9b2f1d
--- /dev/null
+++ b/media/jni/android_media_SyncParams.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "android_media_SyncParams.h"
+
+#include "JNIHelp.h"
+
+namespace android {
+
+void SyncParams::fields_t::init(JNIEnv *env) {  // resolves and caches IDs for android.media.SyncParams; no-ops if the class is missing
+    jclass lclazz = env->FindClass("android/media/SyncParams");
+    if (lclazz == NULL) {
+        return;
+    }
+
+    clazz = (jclass)env->NewGlobalRef(lclazz);
+    if (clazz == NULL) {
+        return;
+    }
+
+    constructID = env->GetMethodID(clazz, "<init>", "()V");
+
+    sync_source = env->GetFieldID(clazz, "mSyncSource", "I");
+    audio_adjust_mode = env->GetFieldID(clazz, "mAudioAdjustMode", "I");
+    tolerance = env->GetFieldID(clazz, "mTolerance", "F");
+    frame_rate = env->GetFieldID(clazz, "mFrameRate", "F");
+    set = env->GetFieldID(clazz, "mSet", "I");
+
+    set_sync_source =
+        env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_SYNC_SOURCE", "I"));
+    set_audio_adjust_mode = env->GetStaticIntField(
+            clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_ADJUST_MODE", "I"));
+    set_tolerance =
+        env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_TOLERANCE", "I"));
+    set_frame_rate =
+        env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_FRAME_RATE", "I"));
+
+    env->DeleteLocalRef(lclazz);
+}
+
+void SyncParams::fields_t::exit(JNIEnv *env) {  // releases the global class reference taken in init()
+    env->DeleteGlobalRef(clazz);
+    clazz = NULL;
+}
+
+void SyncParams::fillFromJobject(JNIEnv *env, const fields_t& fields, jobject params) {  // reads a Java SyncParams into this struct
+    sync.mSource = (AVSyncSource)env->GetIntField(params, fields.sync_source);
+    sync.mAudioAdjustMode = (AVSyncAudioAdjustMode)env->GetIntField(params, fields.audio_adjust_mode);
+    sync.mTolerance = env->GetFloatField(params, fields.tolerance);
+    frameRate = env->GetFloatField(params, fields.frame_rate);
+    int set = env->GetIntField(params, fields.set);  // bitmask of which fields the caller set
+
+    syncSourceSet = set & fields.set_sync_source;
+    audioAdjustModeSet = set & fields.set_audio_adjust_mode;
+    toleranceSet = set & fields.set_tolerance;
+    frameRateSet = set & fields.set_frame_rate;
+}
+
+jobject SyncParams::asJobject(JNIEnv *env, const fields_t& fields) {  // builds a new Java SyncParams from this struct
+    jobject params = env->NewObject(fields.clazz, fields.constructID);
+    if (params == NULL) {
+        return NULL;
+    }
+    env->SetIntField(params, fields.sync_source, (jint)sync.mSource);
+    env->SetIntField(params, fields.audio_adjust_mode, (jint)sync.mAudioAdjustMode);
+    env->SetFloatField(params, fields.tolerance, (jfloat)sync.mTolerance);
+    env->SetFloatField(params, fields.frame_rate, (jfloat)frameRate);
+    env->SetIntField(
+            params, fields.set,
+            (syncSourceSet ? fields.set_sync_source : 0)
+                    | (audioAdjustModeSet ? fields.set_audio_adjust_mode : 0)
+                    | (toleranceSet ? fields.set_tolerance : 0)
+                    | (frameRateSet ? fields.set_frame_rate : 0));
+
+    return params;
+}
+
+} // namespace android
diff --git a/media/jni/android_media_SyncParams.h b/media/jni/android_media_SyncParams.h
new file mode 100644
index 0000000..543d79a
--- /dev/null
+++ b/media/jni/android_media_SyncParams.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_SYNC_PARAMS_H_
+#define _ANDROID_MEDIA_SYNC_PARAMS_H_
+
+#include "jni.h"
+
+#include <media/stagefright/MediaSync.h>
+
+namespace android {
+
+struct SyncParams {  // C++ mirror of android.media.SyncParams plus per-field "set" flags
+    AVSyncSettings sync;  // source / audio-adjust-mode / tolerance
+    float frameRate;
+
+    bool syncSourceSet;
+    bool audioAdjustModeSet;
+    bool toleranceSet;
+    bool frameRateSet;
+
+    struct fields_t {  // cached JNI IDs and SET_* bit values for android.media.SyncParams
+        jclass clazz;
+        jmethodID constructID;
+
+        jfieldID sync_source;
+        jfieldID audio_adjust_mode;
+        jfieldID tolerance;
+        jfieldID frame_rate;
+        jfieldID set;
+        jint set_sync_source;
+        jint set_audio_adjust_mode;
+        jint set_tolerance;
+        jint set_frame_rate;
+
+        // initializes fields
+        void init(JNIEnv *env);
+
+        // releases global references held
+        void exit(JNIEnv *env);
+    };
+
+    // fills this from an android.media.SyncParams object
+    void fillFromJobject(JNIEnv *env, const fields_t& fields, jobject params);
+
+    // returns this as a android.media.SyncParams object
+    jobject asJobject(JNIEnv *env, const fields_t& fields);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_SYNC_PARAMS_H_
diff --git a/media/jni/audioeffect/android_media_AudioEffect.cpp b/media/jni/audioeffect/android_media_AudioEffect.cpp
index c364d46..96b72a2 100644
--- a/media/jni/audioeffect/android_media_AudioEffect.cpp
+++ b/media/jni/audioeffect/android_media_AudioEffect.cpp
@@ -25,6 +25,8 @@
#include <android_runtime/AndroidRuntime.h>
#include "media/AudioEffect.h"
+#include <ScopedUtfChars.h>
+
using namespace android;
#define AUDIOEFFECT_SUCCESS 0
@@ -249,7 +251,8 @@ android_media_AudioEffect_native_init(JNIEnv *env)
static jint
android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
- jstring type, jstring uuid, jint priority, jint sessionId, jintArray jId, jobjectArray javadesc)
+ jstring type, jstring uuid, jint priority, jint sessionId, jintArray jId,
+ jobjectArray javadesc, jstring opPackageName)
{
ALOGV("android_media_AudioEffect_native_setup");
AudioEffectJniStorage* lpJniStorage = NULL;
@@ -267,6 +270,8 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
jstring jdescName;
jstring jdescImplementor;
+ ScopedUtfChars opPackageNameStr(env, opPackageName);
+
if (type != NULL) {
typeStr = env->GetStringUTFChars(type, NULL);
if (typeStr == NULL) { // Out of memory
@@ -312,6 +317,7 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
// create the native AudioEffect object
lpAudioEffect = new AudioEffect(typeStr,
+ String16(opPackageNameStr.c_str()),
uuidStr,
priority,
effectCallback,
@@ -868,7 +874,7 @@ android_media_AudioEffect_native_queryPreProcessings(JNIEnv *env, jclass clazz _
// Dalvik VM type signatures
static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_AudioEffect_native_init},
- {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;II[I[Ljava/lang/Object;)I",
+ {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;II[I[Ljava/lang/Object;Ljava/lang/String;)I",
(void *)android_media_AudioEffect_native_setup},
{"native_finalize", "()V", (void *)android_media_AudioEffect_native_finalize},
{"native_release", "()V", (void *)android_media_AudioEffect_native_release},
diff --git a/media/jni/audioeffect/android_media_Visualizer.cpp b/media/jni/audioeffect/android_media_Visualizer.cpp
index 460277f..abc681e 100644
--- a/media/jni/audioeffect/android_media_Visualizer.cpp
+++ b/media/jni/audioeffect/android_media_Visualizer.cpp
@@ -26,6 +26,8 @@
#include <utils/threads.h>
#include "media/Visualizer.h"
+#include <ScopedUtfChars.h>
+
using namespace android;
#define VISUALIZER_SUCCESS 0
@@ -331,7 +333,7 @@ static void android_media_visualizer_effect_callback(int32_t event,
static jint
android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
- jint sessionId, jintArray jId)
+ jint sessionId, jintArray jId, jstring opPackageName)
{
ALOGV("android_media_visualizer_native_setup");
visualizerJniStorage* lpJniStorage = NULL;
@@ -339,6 +341,8 @@ android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_th
Visualizer* lpVisualizer = NULL;
jint* nId = NULL;
+ ScopedUtfChars opPackageNameStr(env, opPackageName);
+
lpJniStorage = new visualizerJniStorage();
if (lpJniStorage == NULL) {
ALOGE("setup: Error creating JNI Storage");
@@ -362,7 +366,8 @@ android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_th
}
// create the native Visualizer object
- lpVisualizer = new Visualizer(0,
+ lpVisualizer = new Visualizer(String16(opPackageNameStr.c_str()),
+ 0,
android_media_visualizer_effect_callback,
lpJniStorage,
sessionId);
@@ -662,7 +667,7 @@ android_media_setPeriodicCapture(JNIEnv *env, jobject thiz, jint rate, jboolean
// Dalvik VM type signatures
static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_visualizer_native_init},
- {"native_setup", "(Ljava/lang/Object;I[I)I",
+ {"native_setup", "(Ljava/lang/Object;I[ILjava/lang/String;)I",
(void *)android_media_visualizer_native_setup},
{"native_finalize", "()V", (void *)android_media_visualizer_native_finalize},
{"native_release", "()V", (void *)android_media_visualizer_native_release},
diff --git a/media/jni/soundpool/Android.mk b/media/jni/soundpool/Android.mk
index 71ab013..2476056 100644
--- a/media/jni/soundpool/Android.mk
+++ b/media/jni/soundpool/Android.mk
@@ -2,7 +2,7 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- android_media_SoundPool_SoundPoolImpl.cpp \
+ android_media_SoundPool.cpp \
SoundPool.cpp \
SoundPoolThread.cpp
diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp
index a73209b..8038cdf 100644
--- a/media/jni/soundpool/SoundPool.cpp
+++ b/media/jni/soundpool/SoundPool.cpp
@@ -229,7 +229,7 @@ bool SoundPool::unload(int sampleID)
{
ALOGV("unload: sampleID=%d", sampleID);
Mutex::Autolock lock(&mLock);
- return mSamples.removeItem(sampleID);
+ return mSamples.removeItem(sampleID) >= 0; // removeItem() returns index or BAD_VALUE
}
int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
@@ -256,7 +256,7 @@ int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
dump();
// allocate a channel
- channel = allocateChannel_l(priority);
+ channel = allocateChannel_l(priority, sampleID);
// no channel allocated - return 0
if (!channel) {
@@ -271,13 +271,25 @@ int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
return channelID;
}
-SoundChannel* SoundPool::allocateChannel_l(int priority)
+SoundChannel* SoundPool::allocateChannel_l(int priority, int sampleID)
{
List<SoundChannel*>::iterator iter;
SoundChannel* channel = NULL;
- // allocate a channel
+ // check if channel for given sampleID still available
if (!mChannels.empty()) {
+ for (iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
+ if (sampleID == (*iter)->getPrevSampleID() && (*iter)->state() == SoundChannel::IDLE) {
+ channel = *iter;
+ mChannels.erase(iter);
+ ALOGV("Allocated recycled channel for same sampleID");
+ break;
+ }
+ }
+ }
+
+ // allocate any channel
+ if (!channel && !mChannels.empty()) {
iter = mChannels.begin();
if (priority >= (*iter)->priority()) {
channel = *iter;
@@ -502,10 +514,11 @@ static status_t decode(int fd, int64_t offset, int64_t length,
if (strncmp(mime, "audio/", 6) == 0) {
AMediaCodec *codec = AMediaCodec_createDecoderByType(mime);
- if (AMediaCodec_configure(codec, format,
- NULL /* window */, NULL /* drm */, 0 /* flags */) != AMEDIA_OK
- || AMediaCodec_start(codec) != AMEDIA_OK
- || AMediaExtractor_selectTrack(ex, i) != AMEDIA_OK) {
+ if (codec == NULL
+ || AMediaCodec_configure(codec, format,
+ NULL /* window */, NULL /* drm */, 0 /* flags */) != AMEDIA_OK
+ || AMediaCodec_start(codec) != AMEDIA_OK
+ || AMediaExtractor_selectTrack(ex, i) != AMEDIA_OK) {
AMediaExtractor_delete(ex);
AMediaCodec_delete(codec);
AMediaFormat_delete(format);
@@ -626,7 +639,7 @@ status_t Sample::doLoad()
goto error;
}
- if ((numChannels < 1) || (numChannels > 2)) {
+ if ((numChannels < 1) || (numChannels > 8)) {
ALOGE("Sample channel count (%d) out of range", numChannels);
status = BAD_VALUE;
goto error;
@@ -648,6 +661,7 @@ error:
void SoundChannel::init(SoundPool* soundPool)
{
mSoundPool = soundPool;
+ mPrevSampleID = -1;
}
// call with sound pool lock held
@@ -656,7 +670,7 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
{
sp<AudioTrack> oldTrack;
sp<AudioTrack> newTrack;
- status_t status;
+ status_t status = NO_ERROR;
{ // scope for the lock
Mutex::Autolock lock(&mLock);
@@ -689,8 +703,10 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
size_t frameCount = 0;
if (loop) {
- frameCount = sample->size()/numChannels/
- ((sample->format() == AUDIO_FORMAT_PCM_16_BIT) ? sizeof(int16_t) : sizeof(uint8_t));
+ const audio_format_t format = sample->format();
+ const size_t frameSize = audio_is_linear_pcm(format)
+ ? numChannels * audio_bytes_per_sample(format) : 1;
+ frameCount = sample->size() / frameSize;
}
#ifndef USE_SHARED_MEM_BUFFER
@@ -701,38 +717,51 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
}
#endif
- // mToggle toggles each time a track is started on a given channel.
- // The toggle is concatenated with the SoundChannel address and passed to AudioTrack
- // as callback user data. This enables the detection of callbacks received from the old
- // audio track while the new one is being started and avoids processing them with
- // wrong audio audio buffer size (mAudioBufferSize)
- unsigned long toggle = mToggle ^ 1;
- void *userData = (void *)((unsigned long)this | toggle);
- audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(numChannels);
-
- // do not create a new audio track if current track is compatible with sample parameters
-#ifdef USE_SHARED_MEM_BUFFER
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
-#else
- uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
- bufferFrames);
-#endif
- oldTrack = mAudioTrack;
- status = newTrack->initCheck();
- if (status != NO_ERROR) {
- ALOGE("Error creating AudioTrack");
- goto exit;
+ // check if the existing track has the same sample id.
+ if (mAudioTrack != 0 && mPrevSampleID == sample->sampleID()) {
+ // the sample rate may fail to change if the audio track is a fast track.
+ if (mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
+ newTrack = mAudioTrack;
+ ALOGV("reusing track %p for sample %d", mAudioTrack.get(), sample->sampleID());
+ }
+ }
+ if (newTrack == 0) {
+ // mToggle toggles each time a track is started on a given channel.
+ // The toggle is concatenated with the SoundChannel address and passed to AudioTrack
+ // as callback user data. This enables the detection of callbacks received from the old
+ // audio track while the new one is being started and avoids processing them with
+ // wrong audio audio buffer size (mAudioBufferSize)
+ unsigned long toggle = mToggle ^ 1;
+ void *userData = (void *)((unsigned long)this | toggle);
+ audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(numChannels);
+
+ // do not create a new audio track if current track is compatible with sample parameters
+ #ifdef USE_SHARED_MEM_BUFFER
+ newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
+ channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData,
+ 0 /*default notification frames*/, AUDIO_SESSION_ALLOCATE,
+ AudioTrack::TRANSFER_DEFAULT,
+ NULL /*offloadInfo*/, -1 /*uid*/, -1 /*pid*/, mSoundPool->attributes());
+ #else
+ uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
+ newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
+ channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
+ bufferFrames, AUDIO_SESSION_ALLOCATE, AudioTrack::TRANSFER_DEFAULT,
+ NULL /*offloadInfo*/, -1 /*uid*/, -1 /*pid*/, mSoundPool->attributes());
+ #endif
+ oldTrack = mAudioTrack;
+ status = newTrack->initCheck();
+ if (status != NO_ERROR) {
+ ALOGE("Error creating AudioTrack");
+ goto exit;
+ }
+ // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
+ mToggle = toggle;
+ mAudioTrack = newTrack;
+ ALOGV("using new track %p for sample %d", newTrack.get(), sample->sampleID());
}
- ALOGV("setVolume %p", newTrack.get());
newTrack->setVolume(leftVolume, rightVolume);
newTrack->setLoop(0, frameCount, loop);
-
- // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
- mToggle = toggle;
- mAudioTrack = newTrack;
mPos = 0;
mSample = sample;
mChannelID = nextChannelID;
@@ -875,6 +904,7 @@ bool SoundChannel::doStop_l()
setVolume_l(0, 0);
ALOGV("stop");
mAudioTrack->stop();
+ mPrevSampleID = mSample->sampleID();
mSample.clear();
mState = IDLE;
mPriority = IDLE_PRIORITY;
diff --git a/media/jni/soundpool/SoundPool.h b/media/jni/soundpool/SoundPool.h
index 9d9cbdf..4aacf53 100644
--- a/media/jni/soundpool/SoundPool.h
+++ b/media/jni/soundpool/SoundPool.h
@@ -72,8 +72,8 @@ private:
volatile int32_t mRefCount;
uint16_t mSampleID;
uint16_t mSampleRate;
- uint8_t mState : 3;
- uint8_t mNumChannels : 2;
+ uint8_t mState;
+ uint8_t mNumChannels;
audio_format_t mFormat;
int mFd;
int64_t mOffset;
@@ -136,6 +136,7 @@ public:
void nextEvent();
int nextChannelID() { return mNextEvent.channelID(); }
void dump();
+ int getPrevSampleID(void) { return mPrevSampleID; }
private:
static void callback(int event, void* user, void *info);
@@ -152,6 +153,7 @@ private:
int mAudioBufferSize;
unsigned long mToggle;
bool mAutoPaused;
+ int mPrevSampleID;
};
// application object for managing a pool of sounds
@@ -193,7 +195,7 @@ private:
sp<Sample> findSample(int sampleID) { return mSamples.valueFor(sampleID); }
SoundChannel* findChannel (int channelID);
SoundChannel* findNextChannel (int channelID);
- SoundChannel* allocateChannel_l(int priority);
+ SoundChannel* allocateChannel_l(int priority, int sampleID);
void moveToFront_l(SoundChannel* channel);
void notify(SoundPoolEvent event);
void dump();
diff --git a/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp b/media/jni/soundpool/android_media_SoundPool.cpp
index b2333f8..fc4cf05 100644
--- a/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp
+++ b/media/jni/soundpool/android_media_SoundPool.cpp
@@ -47,10 +47,10 @@ static audio_attributes_fields_t javaAudioAttrFields;
// ----------------------------------------------------------------------------
static jint
-android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
+android_media_SoundPool_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
jlong offset, jlong length, jint priority)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_load_FD");
+ ALOGV("android_media_SoundPool_load_FD");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return (jint) ap->load(jniGetFDFromFileDescriptor(env, fileDescriptor),
@@ -58,104 +58,104 @@ android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject
}
static jboolean
-android_media_SoundPool_SoundPoolImpl_unload(JNIEnv *env, jobject thiz, jint sampleID) {
- ALOGV("android_media_SoundPool_SoundPoolImpl_unload\n");
+android_media_SoundPool_unload(JNIEnv *env, jobject thiz, jint sampleID) {
+ ALOGV("android_media_SoundPool_unload\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return JNI_FALSE;
return ap->unload(sampleID) ? JNI_TRUE : JNI_FALSE;
}
static jint
-android_media_SoundPool_SoundPoolImpl_play(JNIEnv *env, jobject thiz, jint sampleID,
+android_media_SoundPool_play(JNIEnv *env, jobject thiz, jint sampleID,
jfloat leftVolume, jfloat rightVolume, jint priority, jint loop,
jfloat rate)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_play\n");
+ ALOGV("android_media_SoundPool_play\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return (jint) ap->play(sampleID, leftVolume, rightVolume, priority, loop, rate);
}
static void
-android_media_SoundPool_SoundPoolImpl_pause(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_pause(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_pause");
+ ALOGV("android_media_SoundPool_pause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->pause(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_resume(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_resume(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_resume");
+ ALOGV("android_media_SoundPool_resume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->resume(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_autoPause(JNIEnv *env, jobject thiz)
+android_media_SoundPool_autoPause(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_autoPause");
+ ALOGV("android_media_SoundPool_autoPause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoPause();
}
static void
-android_media_SoundPool_SoundPoolImpl_autoResume(JNIEnv *env, jobject thiz)
+android_media_SoundPool_autoResume(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_autoResume");
+ ALOGV("android_media_SoundPool_autoResume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoResume();
}
static void
-android_media_SoundPool_SoundPoolImpl_stop(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_stop(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_stop");
+ ALOGV("android_media_SoundPool_stop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->stop(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_setVolume(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setVolume(JNIEnv *env, jobject thiz, jint channelID,
jfloat leftVolume, jfloat rightVolume)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setVolume");
+ ALOGV("android_media_SoundPool_setVolume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setVolume(channelID, (float) leftVolume, (float) rightVolume);
}
static void
-android_media_SoundPool_SoundPoolImpl_setPriority(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setPriority(JNIEnv *env, jobject thiz, jint channelID,
jint priority)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setPriority");
+ ALOGV("android_media_SoundPool_setPriority");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setPriority(channelID, (int) priority);
}
static void
-android_media_SoundPool_SoundPoolImpl_setLoop(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setLoop(JNIEnv *env, jobject thiz, jint channelID,
int loop)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setLoop");
+ ALOGV("android_media_SoundPool_setLoop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setLoop(channelID, loop);
}
static void
-android_media_SoundPool_SoundPoolImpl_setRate(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setRate(JNIEnv *env, jobject thiz, jint channelID,
jfloat rate)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setRate");
+ ALOGV("android_media_SoundPool_setRate");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setRate(channelID, (float) rate);
@@ -169,7 +169,7 @@ static void android_media_callback(SoundPoolEvent event, SoundPool* soundPool, v
}
static jint
-android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
+android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
jint maxChannels, jobject jaa)
{
if (jaa == 0) {
@@ -191,7 +191,7 @@ android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jo
(audio_content_type_t) env->GetIntField(jaa, javaAudioAttrFields.fieldContentType);
paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags);
- ALOGV("android_media_SoundPool_SoundPoolImpl_native_setup");
+ ALOGV("android_media_SoundPool_native_setup");
SoundPool *ap = new SoundPool(maxChannels, paa);
if (ap == NULL) {
return -1;
@@ -211,9 +211,9 @@ android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jo
}
static void
-android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
+android_media_SoundPool_release(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_release");
+ ALOGV("android_media_SoundPool_release");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap != NULL) {
@@ -236,63 +236,63 @@ android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
static JNINativeMethod gMethods[] = {
{ "_load",
"(Ljava/io/FileDescriptor;JJI)I",
- (void *)android_media_SoundPool_SoundPoolImpl_load_FD
+ (void *)android_media_SoundPool_load_FD
},
{ "unload",
"(I)Z",
- (void *)android_media_SoundPool_SoundPoolImpl_unload
+ (void *)android_media_SoundPool_unload
},
{ "_play",
"(IFFIIF)I",
- (void *)android_media_SoundPool_SoundPoolImpl_play
+ (void *)android_media_SoundPool_play
},
{ "pause",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_pause
+ (void *)android_media_SoundPool_pause
},
{ "resume",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_resume
+ (void *)android_media_SoundPool_resume
},
{ "autoPause",
"()V",
- (void *)android_media_SoundPool_SoundPoolImpl_autoPause
+ (void *)android_media_SoundPool_autoPause
},
{ "autoResume",
"()V",
- (void *)android_media_SoundPool_SoundPoolImpl_autoResume
+ (void *)android_media_SoundPool_autoResume
},
{ "stop",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_stop
+ (void *)android_media_SoundPool_stop
},
{ "_setVolume",
"(IFF)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setVolume
+ (void *)android_media_SoundPool_setVolume
},
{ "setPriority",
"(II)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setPriority
+ (void *)android_media_SoundPool_setPriority
},
{ "setLoop",
"(II)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setLoop
+ (void *)android_media_SoundPool_setLoop
},
{ "setRate",
"(IF)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setRate
+ (void *)android_media_SoundPool_setRate
},
{ "native_setup",
"(Ljava/lang/Object;ILjava/lang/Object;)I",
- (void*)android_media_SoundPool_SoundPoolImpl_native_setup
+ (void*)android_media_SoundPool_native_setup
},
{ "release",
"()V",
- (void*)android_media_SoundPool_SoundPoolImpl_release
+ (void*)android_media_SoundPool_release
}
};
-static const char* const kClassPathName = "android/media/SoundPool$SoundPoolImpl";
+static const char* const kClassPathName = "android/media/SoundPool";
jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
{
@@ -314,14 +314,14 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
fields.mNativeContext = env->GetFieldID(clazz, "mNativeContext", "J");
if (fields.mNativeContext == NULL) {
- ALOGE("Can't find SoundPoolImpl.mNativeContext");
+ ALOGE("Can't find SoundPool.mNativeContext");
return result;
}
fields.mPostEvent = env->GetStaticMethodID(clazz, "postEventFromNative",
"(Ljava/lang/Object;IIILjava/lang/Object;)V");
if (fields.mPostEvent == NULL) {
- ALOGE("Can't find android/media/SoundPoolImpl.postEventFromNative");
+ ALOGE("Can't find android/media/SoundPool.postEventFromNative");
return result;
}
diff --git a/media/lib/remotedisplay/java/com/android/media/remotedisplay/RemoteDisplayProvider.java b/media/lib/remotedisplay/java/com/android/media/remotedisplay/RemoteDisplayProvider.java
index e2df77c..4d3edb8 100644
--- a/media/lib/remotedisplay/java/com/android/media/remotedisplay/RemoteDisplayProvider.java
+++ b/media/lib/remotedisplay/java/com/android/media/remotedisplay/RemoteDisplayProvider.java
@@ -287,7 +287,7 @@ public abstract class RemoteDisplayProvider {
*/
public PendingIntent getSettingsPendingIntent() {
if (mSettingsPendingIntent == null) {
- Intent settingsIntent = new Intent(Settings.ACTION_WIFI_DISPLAY_SETTINGS);
+ Intent settingsIntent = new Intent(Settings.ACTION_CAST_SETTINGS);
settingsIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK
| Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED
| Intent.FLAG_ACTIVITY_CLEAR_TOP);
diff --git a/media/packages/BluetoothMidiService/Android.mk b/media/packages/BluetoothMidiService/Android.mk
new file mode 100644
index 0000000..2c9c3c5
--- /dev/null
+++ b/media/packages/BluetoothMidiService/Android.mk
@@ -0,0 +1,11 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := $(call all-subdir-java-files)
+
+LOCAL_PACKAGE_NAME := BluetoothMidiService
+LOCAL_CERTIFICATE := platform
+
+include $(BUILD_PACKAGE)
diff --git a/media/packages/BluetoothMidiService/AndroidManifest.xml b/media/packages/BluetoothMidiService/AndroidManifest.xml
new file mode 100644
index 0000000..15aa581
--- /dev/null
+++ b/media/packages/BluetoothMidiService/AndroidManifest.xml
@@ -0,0 +1,17 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.android.bluetoothmidiservice"
+ >
+
+ <uses-feature android:name="android.hardware.bluetooth_le" android:required="true"/>
+ <uses-feature android:name="android.software.midi" android:required="true"/>
+ <uses-permission android:name="android.permission.BLUETOOTH"/>
+
+ <application
+ android:label="@string/app_name">
+ <service android:name="BluetoothMidiService">
+ <intent-filter>
+ <action android:name="android.media.midi.BluetoothMidiService" />
+ </intent-filter>
+ </service>
+ </application>
+</manifest>
diff --git a/media/packages/BluetoothMidiService/res/values/strings.xml b/media/packages/BluetoothMidiService/res/values/strings.xml
new file mode 100644
index 0000000..c98e56c
--- /dev/null
+++ b/media/packages/BluetoothMidiService/res/values/strings.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <string name="app_name">Bluetooth MIDI Service</string>
+</resources>
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java
new file mode 100644
index 0000000..60c6570
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java
@@ -0,0 +1,278 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothGatt;
+import android.bluetooth.BluetoothGattCallback;
+import android.bluetooth.BluetoothGattCharacteristic;
+import android.bluetooth.BluetoothGattDescriptor;
+import android.bluetooth.BluetoothGattService;
+import android.bluetooth.BluetoothProfile;
+import android.content.Context;
+import android.media.midi.MidiReceiver;
+import android.media.midi.MidiManager;
+import android.media.midi.MidiDeviceServer;
+import android.media.midi.MidiDeviceInfo;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.util.Log;
+
+import com.android.internal.midi.MidiEventScheduler;
+import com.android.internal.midi.MidiEventScheduler.MidiEvent;
+
+import libcore.io.IoUtils;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Class used to implement a Bluetooth MIDI device.
+ */
+public final class BluetoothMidiDevice {
+
+ private static final String TAG = "BluetoothMidiDevice";
+ private static final boolean DEBUG = false;
+
+ private static final int MAX_PACKET_SIZE = 20;
+
+ // Bluetooth MIDI Gatt service UUID
+ private static final UUID MIDI_SERVICE = UUID.fromString(
+ "03B80E5A-EDE8-4B33-A751-6CE34EC4C700");
+ // Bluetooth MIDI Gatt characteristic UUID
+ private static final UUID MIDI_CHARACTERISTIC = UUID.fromString(
+ "7772E5DB-3868-4112-A1A9-F2669D106BF3");
+ // Descriptor UUID for enabling characteristic changed notifications
+ private static final UUID CLIENT_CHARACTERISTIC_CONFIG = UUID.fromString(
+ "00002902-0000-1000-8000-00805f9b34fb");
+
+ private final BluetoothDevice mBluetoothDevice;
+ private final BluetoothMidiService mService;
+ private final MidiManager mMidiManager;
+ private MidiReceiver mOutputReceiver;
+ private final MidiEventScheduler mEventScheduler = new MidiEventScheduler();
+
+ private MidiDeviceServer mDeviceServer;
+ private BluetoothGatt mBluetoothGatt;
+
+ private BluetoothGattCharacteristic mCharacteristic;
+
+ // PacketReceiver for receiving formatted packets from our BluetoothPacketEncoder
+ private final PacketReceiver mPacketReceiver = new PacketReceiver();
+
+ private final BluetoothPacketEncoder mPacketEncoder
+ = new BluetoothPacketEncoder(mPacketReceiver, MAX_PACKET_SIZE);
+
+ private final BluetoothPacketDecoder mPacketDecoder
+ = new BluetoothPacketDecoder(MAX_PACKET_SIZE);
+
+ private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
+ @Override
+ public void onConnectionStateChange(BluetoothGatt gatt, int status,
+ int newState) {
+ String intentAction;
+ if (newState == BluetoothProfile.STATE_CONNECTED) {
+ Log.i(TAG, "Connected to GATT server.");
+ Log.i(TAG, "Attempting to start service discovery:" +
+ mBluetoothGatt.discoverServices());
+ } else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
+ Log.i(TAG, "Disconnected from GATT server.");
+ close();
+ }
+ }
+
+ @Override
+ public void onServicesDiscovered(BluetoothGatt gatt, int status) {
+ if (status == BluetoothGatt.GATT_SUCCESS) {
+ List<BluetoothGattService> services = mBluetoothGatt.getServices();
+ for (BluetoothGattService service : services) {
+ if (MIDI_SERVICE.equals(service.getUuid())) {
+ Log.d(TAG, "found MIDI_SERVICE");
+ List<BluetoothGattCharacteristic> characteristics
+ = service.getCharacteristics();
+ for (BluetoothGattCharacteristic characteristic : characteristics) {
+ if (MIDI_CHARACTERISTIC.equals(characteristic.getUuid())) {
+ Log.d(TAG, "found MIDI_CHARACTERISTIC");
+ mCharacteristic = characteristic;
+
+ // Specification says to read the characteristic first and then
+ // switch to receiving notifications
+ mBluetoothGatt.readCharacteristic(characteristic);
+ break;
+ }
+ }
+ break;
+ }
+ }
+ } else {
+ Log.e(TAG, "onServicesDiscovered received: " + status);
+ close();
+ }
+ }
+
+ @Override
+ public void onCharacteristicRead(BluetoothGatt gatt,
+ BluetoothGattCharacteristic characteristic,
+ int status) {
+ Log.d(TAG, "onCharacteristicRead " + status);
+
+ // switch to receiving notifications after initial characteristic read
+ mBluetoothGatt.setCharacteristicNotification(characteristic, true);
+
+ BluetoothGattDescriptor descriptor = characteristic.getDescriptor(
+ CLIENT_CHARACTERISTIC_CONFIG);
+ if (descriptor != null) {
+ descriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
+ mBluetoothGatt.writeDescriptor(descriptor);
+ } else {
+ Log.e(TAG, "No CLIENT_CHARACTERISTIC_CONFIG for device " + mBluetoothDevice);
+ }
+ }
+
+ @Override
+ public void onCharacteristicWrite(BluetoothGatt gatt,
+ BluetoothGattCharacteristic characteristic,
+ int status) {
+ Log.d(TAG, "onCharacteristicWrite " + status);
+ mPacketEncoder.writeComplete();
+ }
+
+ @Override
+ public void onCharacteristicChanged(BluetoothGatt gatt,
+ BluetoothGattCharacteristic characteristic) {
+ if (DEBUG) {
+ logByteArray("Received ", characteristic.getValue(), 0,
+ characteristic.getValue().length);
+ }
+ mPacketDecoder.decodePacket(characteristic.getValue(), mOutputReceiver);
+ }
+ };
+
+ // This receives MIDI data that has already been passed through our MidiEventScheduler
+ // and has been normalized by our MidiFramer.
+
+ private class PacketReceiver implements PacketEncoder.PacketReceiver {
+ // buffers of every possible packet size
+ private final byte[][] mWriteBuffers;
+
+ public PacketReceiver() {
+ // Create buffers of every possible packet size
+ mWriteBuffers = new byte[MAX_PACKET_SIZE + 1][];
+ for (int i = 0; i <= MAX_PACKET_SIZE; i++) {
+ mWriteBuffers[i] = new byte[i];
+ }
+ }
+
+ @Override
+ public void writePacket(byte[] buffer, int count) {
+ if (mCharacteristic == null) {
+ Log.w(TAG, "not ready to send packet yet");
+ return;
+ }
+ byte[] writeBuffer = mWriteBuffers[count];
+ System.arraycopy(buffer, 0, writeBuffer, 0, count);
+ mCharacteristic.setValue(writeBuffer);
+ if (DEBUG) {
+ logByteArray("Sent ", mCharacteristic.getValue(), 0,
+ mCharacteristic.getValue().length);
+ }
+ mBluetoothGatt.writeCharacteristic(mCharacteristic);
+ }
+ }
+
+ public BluetoothMidiDevice(Context context, BluetoothDevice device,
+ BluetoothMidiService service) {
+ mBluetoothDevice = device;
+ mService = service;
+
+ mBluetoothGatt = mBluetoothDevice.connectGatt(context, false, mGattCallback);
+
+ mMidiManager = (MidiManager)context.getSystemService(Context.MIDI_SERVICE);
+
+ Bundle properties = new Bundle();
+ properties.putString(MidiDeviceInfo.PROPERTY_NAME, mBluetoothGatt.getDevice().getName());
+ properties.putParcelable(MidiDeviceInfo.PROPERTY_BLUETOOTH_DEVICE,
+ mBluetoothGatt.getDevice());
+
+ MidiReceiver[] inputPortReceivers = new MidiReceiver[1];
+ inputPortReceivers[0] = mEventScheduler.getReceiver();
+
+ mDeviceServer = mMidiManager.createDeviceServer(inputPortReceivers, 1,
+ null, null, properties, MidiDeviceInfo.TYPE_BLUETOOTH, null);
+
+ mOutputReceiver = mDeviceServer.getOutputPortReceivers()[0];
+
+ // This thread waits for outgoing messages from our MidiEventScheduler
+ // And forwards them to our MidiFramer to be prepared to send via Bluetooth.
+ new Thread("BluetoothMidiDevice " + mBluetoothDevice) {
+ @Override
+ public void run() {
+ while (true) {
+ MidiEvent event;
+ try {
+ event = (MidiEvent)mEventScheduler.waitNextEvent();
+ } catch (InterruptedException e) {
+ // try again
+ continue;
+ }
+ if (event == null) {
+ break;
+ }
+ try {
+ mPacketEncoder.send(event.data, 0, event.count,
+ event.getTimestamp());
+ } catch (IOException e) {
+ Log.e(TAG, "mPacketAccumulator.send failed", e);
+ }
+ mEventScheduler.addEventToPool(event);
+ }
+ Log.d(TAG, "BluetoothMidiDevice thread exit");
+ }
+ }.start();
+ }
+
+ private void close() {
+ synchronized (mBluetoothDevice) {
+ mEventScheduler.close();
+ if (mDeviceServer != null) {
+ IoUtils.closeQuietly(mDeviceServer);
+ mDeviceServer = null;
+ mService.deviceClosed(mBluetoothDevice);
+ }
+ if (mBluetoothGatt != null) {
+ mBluetoothGatt.close();
+ mBluetoothGatt = null;
+ }
+ }
+ }
+
+ public IBinder getBinder() {
+ return mDeviceServer.asBinder();
+ }
+
+ private static void logByteArray(String prefix, byte[] value, int offset, int count) {
+ StringBuilder builder = new StringBuilder(prefix);
+ for (int i = offset; i < count; i++) {
+ builder.append(String.format("0x%02X", value[i]));
+ if (i != value.length - 1) {
+ builder.append(", ");
+ }
+ }
+ Log.d(TAG, builder.toString());
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java
new file mode 100644
index 0000000..fbde2b4
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.app.Service;
+import android.bluetooth.BluetoothDevice;
+import android.content.Intent;
+import android.media.midi.MidiManager;
+import android.os.IBinder;
+import android.util.Log;
+
+import java.util.HashMap;
+
+public class BluetoothMidiService extends Service {
+ private static final String TAG = "BluetoothMidiService";
+
+ // BluetoothMidiDevices keyed by BluetoothDevice
+ private final HashMap<BluetoothDevice,BluetoothMidiDevice> mDeviceServerMap
+ = new HashMap<BluetoothDevice,BluetoothMidiDevice>();
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ if (MidiManager.BLUETOOTH_MIDI_SERVICE_INTENT.equals(intent.getAction())) {
+ BluetoothDevice bluetoothDevice = (BluetoothDevice)intent.getParcelableExtra("device");
+ if (bluetoothDevice == null) {
+ Log.e(TAG, "no BluetoothDevice in onBind intent");
+ return null;
+ }
+
+ BluetoothMidiDevice device;
+ synchronized (mDeviceServerMap) {
+ device = mDeviceServerMap.get(bluetoothDevice);
+ if (device == null) {
+ device = new BluetoothMidiDevice(this, bluetoothDevice, this);
+ }
+ }
+ return device.getBinder();
+ }
+ return null;
+ }
+
+ void deviceClosed(BluetoothDevice device) {
+ synchronized (mDeviceServerMap) {
+ mDeviceServerMap.remove(device);
+ }
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java
new file mode 100644
index 0000000..ea95a01
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+import android.util.Log;
+
+import java.io.IOException;
+
+/**
+ * Decodes a Bluetooth LE MIDI packet buffer and passes the contained messages to a
+ * {@link android.media.midi.MidiReceiver}
+ */
+public class BluetoothPacketDecoder extends PacketDecoder {
+
+ private static final String TAG = "BluetoothPacketDecoder";
+
+ private final byte[] mBuffer;
+ private MidiBtleTimeTracker mTimeTracker;
+
+ private final int TIMESTAMP_MASK_HIGH = 0x1F80;
+ private final int TIMESTAMP_MASK_LOW = 0x7F;
+ private final int HEADER_TIMESTAMP_MASK = 0x3F;
+
+ public BluetoothPacketDecoder(int maxPacketSize) {
+ mBuffer = new byte[maxPacketSize];
+ }
+
+ @Override
+ public void decodePacket(byte[] buffer, MidiReceiver receiver) {
+ if (mTimeTracker == null) {
+ mTimeTracker = new MidiBtleTimeTracker(System.nanoTime());
+ }
+
+ int length = buffer.length;
+
+ // NOTE this code allows running status across packets,
+ // although the specification does not allow that.
+
+ if (length < 1) {
+ Log.e(TAG, "empty packet");
+ return;
+ }
+ byte header = buffer[0];
+ if ((header & 0xC0) != 0x80) {
+ Log.e(TAG, "packet does not start with header");
+ return;
+ }
+
+ // shift bits 0 - 5 to bits 7 - 12
+ int highTimestamp = (header & HEADER_TIMESTAMP_MASK) << 7;
+ boolean lastWasTimestamp = false;
+ int dataCount = 0;
+ int previousLowTimestamp = 0;
+ long nanoTimestamp = 0;
+ int currentTimestamp = 0;
+
+ // iterate through the rest of the packet, separating MIDI data from timestamps
+ for (int i = 1; i < buffer.length; i++) {
+ byte b = buffer[i];
+
+ if ((b & 0x80) != 0 && !lastWasTimestamp) {
+ lastWasTimestamp = true;
+ int lowTimestamp = b & TIMESTAMP_MASK_LOW;
+ if (lowTimestamp < previousLowTimestamp) {
+ highTimestamp = (highTimestamp + 0x0080) & TIMESTAMP_MASK_HIGH;
+ }
+ previousLowTimestamp = lowTimestamp;
+
+ int newTimestamp = highTimestamp | lowTimestamp;
+ if (newTimestamp != currentTimestamp) {
+ if (dataCount > 0) {
+ // send previous message separately since it has a different timestamp
+ try {
+ receiver.send(mBuffer, 0, dataCount, nanoTimestamp);
+ } catch (IOException e) {
+ // ???
+ }
+ dataCount = 0;
+ }
+ currentTimestamp = newTimestamp;
+ }
+
+ // calculate nanoTimestamp
+ long now = System.nanoTime();
+ nanoTimestamp = mTimeTracker.convertTimestampToNanotime(currentTimestamp, now);
+ } else {
+ lastWasTimestamp = false;
+ mBuffer[dataCount++] = b;
+ }
+ }
+
+ if (dataCount > 0) {
+ try {
+ receiver.send(mBuffer, 0, dataCount, nanoTimestamp);
+ } catch (IOException e) {
+ // ???
+ }
+ }
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java
new file mode 100644
index 0000000..5fb162c
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+
+import com.android.internal.midi.MidiConstants;
+import com.android.internal.midi.MidiFramer;
+
+import java.io.IOException;
+
+/**
+ * This class accumulates MIDI messages to form a MIDI packet.
+ */
+public class BluetoothPacketEncoder extends PacketEncoder {
+
+ private static final String TAG = "BluetoothPacketEncoder";
+
+ private static final long MILLISECOND_NANOS = 1000000L;
+
+ // mask for generating 13 bit timestamps
+ private static final int MILLISECOND_MASK = 0x1FFF;
+
+ private final PacketReceiver mPacketReceiver;
+
+ // buffer for accumulating messages to write
+ private final byte[] mAccumulationBuffer;
+ // number of bytes currently in mAccumulationBuffer
+ private int mAccumulatedBytes;
+ // timestamp for first message in current packet
+ private int mPacketTimestamp;
+ // current running status, or zero if none
+ private byte mRunningStatus;
+
+ private boolean mWritePending;
+
+ private final Object mLock = new Object();
+
+ // This receives normalized data from mMidiFramer and accumulates it into a packet buffer
+ private final MidiReceiver mFramedDataReceiver = new MidiReceiver() {
+ @Override
+ public void onSend(byte[] msg, int offset, int count, long timestamp)
+ throws IOException {
+
+ synchronized (mLock) {
+ int milliTimestamp = (int)(timestamp / MILLISECOND_NANOS) & MILLISECOND_MASK;
+ byte status = msg[offset];
+ boolean isSysExStart = (status == MidiConstants.STATUS_SYSTEM_EXCLUSIVE);
+ boolean isSysExContinuation = ((status & 0x80) == 0);
+
+ int bytesNeeded;
+ if (isSysExStart || isSysExContinuation) {
+ // SysEx messages can be split into multiple packets
+ bytesNeeded = 1;
+ } else {
+ bytesNeeded = count;
+ }
+
+ boolean needsTimestamp = (milliTimestamp != mPacketTimestamp);
+ if (isSysExStart) {
+ // SysEx start byte must be preceded by a timestamp
+ needsTimestamp = true;
+ } else if (isSysExContinuation) {
+ // SysEx continuation packets must not have timestamp byte
+ needsTimestamp = false;
+ }
+ if (needsTimestamp) bytesNeeded++; // add one for timestamp byte
+ if (status == mRunningStatus) bytesNeeded--; // subtract one for status byte
+
+ if (mAccumulatedBytes + bytesNeeded > mAccumulationBuffer.length) {
+ // write out our data if there is no more room
+ // if necessary, block until previous packet is sent
+ flushLocked(true);
+ }
+
+ // write the header if necessary
+ if (appendHeader(milliTimestamp)) {
+ needsTimestamp = !isSysExContinuation;
+ }
+
+ // write new timestamp byte if necessary
+ if (needsTimestamp) {
+ // timestamp byte with bits 0 - 6 of timestamp
+ mAccumulationBuffer[mAccumulatedBytes++] =
+ (byte)(0x80 | (milliTimestamp & 0x7F));
+ mPacketTimestamp = milliTimestamp;
+ }
+
+ if (isSysExStart || isSysExContinuation) {
+ // MidiFramer will end the packet with SysEx End if there is one in the buffer
+ boolean hasSysExEnd =
+ (msg[offset + count - 1] == MidiConstants.STATUS_END_SYSEX);
+ int remaining = (hasSysExEnd ? count - 1 : count);
+
+ while (remaining > 0) {
+ if (mAccumulatedBytes == mAccumulationBuffer.length) {
+ // write out our data if there is no more room
+ // if necessary, block until previous packet is sent
+ flushLocked(true);
+ appendHeader(milliTimestamp);
+ }
+
+ int copy = mAccumulationBuffer.length - mAccumulatedBytes;
+ if (copy > remaining) copy = remaining;
+ System.arraycopy(msg, offset, mAccumulationBuffer, mAccumulatedBytes, copy);
+ mAccumulatedBytes += copy;
+ offset += copy;
+ remaining -= copy;
+ }
+
+ if (hasSysExEnd) {
+                        // SysEx End command must be preceded by a timestamp byte
+ if (mAccumulatedBytes + 2 > mAccumulationBuffer.length) {
+ // write out our data if there is no more room
+ // if necessary, block until previous packet is sent
+ flushLocked(true);
+ appendHeader(milliTimestamp);
+ }
+ mAccumulationBuffer[mAccumulatedBytes++] =
+ (byte)(0x80 | (milliTimestamp & 0x7F));
+ mAccumulationBuffer[mAccumulatedBytes++] = MidiConstants.STATUS_END_SYSEX;
+ }
+ } else {
+ // Non-SysEx message
+ if (status != mRunningStatus) {
+ mAccumulationBuffer[mAccumulatedBytes++] = status;
+ if (MidiConstants.allowRunningStatus(status)) {
+ mRunningStatus = status;
+ } else if (MidiConstants.cancelsRunningStatus(status)) {
+ mRunningStatus = 0;
+ }
+ }
+
+ // now copy data bytes
+ int dataLength = count - 1;
+ System.arraycopy(msg, offset + 1, mAccumulationBuffer, mAccumulatedBytes,
+ dataLength);
+ mAccumulatedBytes += dataLength;
+ }
+
+ // write the packet if possible, but do not block
+ flushLocked(false);
+ }
+ }
+ };
+
+ private boolean appendHeader(int milliTimestamp) {
+ // write header if we are starting a new packet
+ if (mAccumulatedBytes == 0) {
+ // header byte with timestamp bits 7 - 12
+ mAccumulationBuffer[mAccumulatedBytes++] =
+ (byte)(0x80 | ((milliTimestamp >> 7) & 0x3F));
+ mPacketTimestamp = milliTimestamp;
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ // MidiFramer for normalizing incoming data
+ private final MidiFramer mMidiFramer = new MidiFramer(mFramedDataReceiver);
+
+ public BluetoothPacketEncoder(PacketReceiver packetReceiver, int maxPacketSize) {
+ mPacketReceiver = packetReceiver;
+ mAccumulationBuffer = new byte[maxPacketSize];
+ }
+
+ @Override
+ public void onSend(byte[] msg, int offset, int count, long timestamp)
+ throws IOException {
+ // normalize the data by passing it through a MidiFramer first
+ mMidiFramer.send(msg, offset, count, timestamp);
+ }
+
+ @Override
+ public void writeComplete() {
+ synchronized (mLock) {
+ mWritePending = false;
+ flushLocked(false);
+ mLock.notify();
+ }
+ }
+
+ private void flushLocked(boolean canBlock) {
+ if (mWritePending && !canBlock) {
+ return;
+ }
+
+ while (mWritePending && mAccumulatedBytes > 0) {
+ try {
+ mLock.wait();
+ } catch (InterruptedException e) {
+ // try again
+ continue;
+ }
+ }
+
+ if (mAccumulatedBytes > 0) {
+ mPacketReceiver.writePacket(mAccumulationBuffer, mAccumulatedBytes);
+ mAccumulatedBytes = 0;
+ mPacketTimestamp = 0;
+ mRunningStatus = 0;
+ mWritePending = true;
+ }
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java
new file mode 100644
index 0000000..5202f9a
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+/**
+ * Convert MIDI over BTLE timestamps to system nanotime.
+ */
+public class MidiBtleTimeTracker {
+
+ public final static long NANOS_PER_MILLI = 1000000L;
+
+ private final static long RANGE_MILLIS = 0x2000; // per MIDI / BTLE standard
+ private final static long RANGE_NANOS = RANGE_MILLIS * NANOS_PER_MILLI;
+
+ private int mWindowMillis = 20; // typical max connection interval
+ private long mWindowNanos = mWindowMillis * NANOS_PER_MILLI;
+
+ private int mPreviousTimestamp; // Used to calculate deltas.
+ private long mPreviousNow;
+    // Our model of the peripheral's millisecond clock.
+ private long mPeripheralTimeMillis;
+ // Host time that corresponds to time=0 on the peripheral.
+ private long mBaseHostTimeNanos;
+ private long mPreviousResult; // To prevent retrograde timestamp
+
+ public MidiBtleTimeTracker(long now) {
+ mPeripheralTimeMillis = 0;
+ mBaseHostTimeNanos = now;
+ mPreviousNow = now;
+ }
+
+ /**
+ * @param timestamp
+ * 13-bit millis in range of 0 to 8191
+ * @param now
+ * current time in nanoseconds
+ * @return nanoseconds corresponding to the timestamp
+ */
+ public long convertTimestampToNanotime(int timestamp, long now) {
+ long deltaMillis = timestamp - mPreviousTimestamp;
+ // will be negative when timestamp wraps
+ if (deltaMillis < 0) {
+ deltaMillis += RANGE_MILLIS;
+ }
+ mPeripheralTimeMillis += deltaMillis;
+
+ // If we have not been called for a long time then
+ // make sure we have not wrapped multiple times.
+ if ((now - mPreviousNow) > (RANGE_NANOS / 2)) {
+ // Handle missed wraps.
+ long minimumTimeNanos = (now - mBaseHostTimeNanos)
+ - (RANGE_NANOS / 2);
+ long minimumTimeMillis = minimumTimeNanos / NANOS_PER_MILLI;
+ while (mPeripheralTimeMillis < minimumTimeMillis) {
+ mPeripheralTimeMillis += RANGE_MILLIS;
+ }
+ }
+
+ // Convert peripheral time millis to host time nanos.
+ long timestampHostNanos = (mPeripheralTimeMillis * NANOS_PER_MILLI)
+ + mBaseHostTimeNanos;
+
+ // The event cannot be in the future. So move window if we hit that.
+ if (timestampHostNanos > now) {
+ mPeripheralTimeMillis = 0;
+ mBaseHostTimeNanos = now;
+ timestampHostNanos = now;
+ } else {
+ // Timestamp should not be older than our window time.
+ long windowBottom = now - mWindowNanos;
+ if (timestampHostNanos < windowBottom) {
+ mPeripheralTimeMillis = 0;
+ mBaseHostTimeNanos = windowBottom;
+ timestampHostNanos = windowBottom;
+ }
+ }
+ // prevent retrograde timestamp
+ if (timestampHostNanos < mPreviousResult) {
+ timestampHostNanos = mPreviousResult;
+ }
+ mPreviousResult = timestampHostNanos;
+ mPreviousTimestamp = timestamp;
+ mPreviousNow = now;
+ return timestampHostNanos;
+ }
+
+ public int getWindowMillis() {
+ return mWindowMillis;
+ }
+
+ public void setWindowMillis(int window) {
+ this.mWindowMillis = window;
+ }
+
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java
new file mode 100644
index 0000000..da4b63a
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+
+/**
+ * This is an abstract base class that decodes a packet buffer and passes it to a
+ * {@link android.media.midi.MidiReceiver}
+ */
+public abstract class PacketDecoder {
+
+ /**
+ * Decodes MIDI data in a packet and passes it to a {@link android.media.midi.MidiReceiver}
+ * @param buffer the packet to decode
+ * @param receiver the {@link android.media.midi.MidiReceiver} to receive the decoded MIDI data
+ */
+ abstract public void decodePacket(byte[] buffer, MidiReceiver receiver);
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java
new file mode 100644
index 0000000..12c8b9b
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+
+/**
+ * This is an abstract base class that encodes MIDI data into a packet buffer.
+ * PacketEncoder receives data via its {@link android.media.midi.MidiReceiver#onSend} method
+ * and notifies its client of packets to write via the {@link PacketEncoder.PacketReceiver}
+ * interface.
+ */
+public abstract class PacketEncoder extends MidiReceiver {
+
+ public interface PacketReceiver {
+ /** Called to write an accumulated packet.
+ * @param buffer the packet buffer to write
+ * @param count the number of bytes in the packet buffer to write
+ */
+ public void writePacket(byte[] buffer, int count);
+ }
+
+ /**
+ * Called to inform PacketEncoder when the previous write is complete.
+ */
+ abstract public void writeComplete();
+}
diff --git a/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java b/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java
index 46cb64e..b8f5704 100644
--- a/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java
+++ b/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java
@@ -19,7 +19,7 @@ package com.android.mediadump;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
-import android.content.DialogInterface;;
+import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java
index 86c23c7..16b4c43 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java
@@ -60,6 +60,7 @@ public class MediaNames {
public static final String VIDEO_H264_AAC = "/sdcard/media_api/video/H264_320_AAC_64.3gp";
public static final String VIDEO_H264_AMR = "/sdcard/media_api/video/H264_320_AMRNB_6.3gp";
public static final String VIDEO_HEVC_AAC = "/sdcard/media_api/video/HEVC_320_AAC_128.mp4";
+ public static final String VIDEO_MPEG2_AAC = "/sdcard/media_api/video/MPEG2_1500_AAC_128.mp4";
public static final String VIDEO_HIGHRES_H263 = "/sdcard/media_api/video/H263_500_AMRNB_12.3gp";
public static final String VIDEO_HIGHRES_MP4 = "/sdcard/media_api/video/H264_500_AAC_128.3gp";
public static final String VIDEO_WEBM = "/sdcard/media_api/video/big-buck-bunny_trailer.webm";
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CodecTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CodecTest.java
index 66ed933..bcfcbf3 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CodecTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CodecTest.java
@@ -823,13 +823,20 @@ public class CodecTest {
duration = mMediaPlayer.getDuration();
// start to play
mMediaPlayer.start();
- waittime = duration - mMediaPlayer.getCurrentPosition();
- synchronized(onCompletion){
- try {
- onCompletion.wait(waittime + buffertime);
- }catch (Exception e) {
- Log.v(TAG, "playMediaSamples are interrupted");
- return false;
+ if (duration < 0) {
+ Log.w(TAG, filePath + " has unknown duration, waiting until playback completes");
+ while (mMediaPlayer.isPlaying()) {
+ SystemClock.sleep(1000);
+ }
+ } else {
+ waittime = duration - mMediaPlayer.getCurrentPosition();
+ synchronized(onCompletion){
+ try {
+ onCompletion.wait(waittime + buffertime);
+ } catch (Exception e) {
+ Log.v(TAG, "playMediaSamples are interrupted");
+ return false;
+ }
}
}
terminateMessageLooper();
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java
index d7069cac..35540e3 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java
@@ -27,6 +27,8 @@ import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.hardware.Camera;
+import android.media.MediaCodec;
+import android.media.MediaMetadataRetriever;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.media.EncoderCapabilities;
@@ -224,10 +226,12 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
private boolean recordVideoFromSurface(
int frameRate, int captureRate, int width, int height,
- int videoFormat, int outFormat, String outFile, boolean videoOnly) {
+ int videoFormat, int outFormat, String outFile, boolean videoOnly,
+ Surface persistentSurface) {
Log.v(TAG,"recordVideoFromSurface");
MediaRecorder recorder = new MediaRecorder();
int sleepTime = 33; // normal capture at 33ms / frame
+ Surface surface = null;
try {
if (!videoOnly) {
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
@@ -245,8 +249,15 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
if (!videoOnly) {
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
}
+ if (persistentSurface != null) {
+ Log.v(TAG, "using persistent surface");
+ surface = persistentSurface;
+ recorder.setInputSurface(surface);
+ }
recorder.prepare();
- Surface surface = recorder.getSurface();
+ if (persistentSurface == null) {
+ surface = recorder.getSurface();
+ }
Paint paint = new Paint();
paint.setTextSize(16);
@@ -282,11 +293,15 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
Log.v(TAG, "stop");
recorder.stop();
- recorder.release();
} catch (Exception e) {
- Log.v("record video failed ", e.toString());
- recorder.release();
+ Log.v(TAG, "record video failed: " + e.toString());
return false;
+ } finally {
+ recorder.release();
+ // release surface if not using persistent surface
+ if (persistentSurface == null && surface != null) {
+ surface.release();
+ }
}
return true;
}
@@ -426,6 +441,29 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
return validVideo;
}
+ private boolean validateMetadata(String filePath, int captureRate) {
+ MediaMetadataRetriever retriever = new MediaMetadataRetriever();
+
+ retriever.setDataSource(filePath);
+
+ // verify capture rate meta key is present and correct
+ String captureFps = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE);
+
+ if (captureFps == null) {
+ Log.d(TAG, "METADATA_KEY_CAPTURE_FRAMERATE is missing");
+ return false;
+ }
+
+ if (Math.abs(Float.parseFloat(captureFps) - captureRate) > 0.001) {
+ Log.d(TAG, "METADATA_KEY_CAPTURE_FRAMERATE is incorrect: "
+ + captureFps + "vs. " + captureRate);
+ return false;
+ }
+
+ // verify other meta keys here if necessary
+ return true;
+ }
@LargeTest
/*
* This test case set the camera in portrait mode.
@@ -526,7 +564,7 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
success = recordVideoFromSurface(frameRate, 0, 352, 288, codec,
MediaRecorder.OutputFormat.THREE_GPP, filename,
- k == 0 ? true : false /* videoOnly */);
+ k == 0 ? true : false /* videoOnly */, null);
if (success) {
success = validateVideo(filename, 352, 288);
}
@@ -540,6 +578,40 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
assertTrue("testSurfaceRecording", noOfFailure == 0);
}
+ public void testPersistentSurfaceRecording() {
+ boolean success = false;
+ int noOfFailure = 0;
+ Surface surface = null;
+ try {
+ int codec = MediaRecorder.VideoEncoder.H264;
+ int frameRate = MediaProfileReader.getMaxFrameRateForCodec(codec);
+ surface = MediaCodec.createPersistentInputSurface();
+ for (int k = 0; k < 2; k++) {
+ String filename = "/sdcard/surface_persistent" + k + ".3gp";
+
+ Log.v(TAG, "test persistent surface - round " + k);
+ success = recordVideoFromSurface(frameRate, 0, 352, 288, codec,
+ MediaRecorder.OutputFormat.THREE_GPP, filename,
+ true /* videoOnly */, surface);
+ if (success) {
+ success = validateVideo(filename, 352, 288);
+ }
+ if (!success) {
+ noOfFailure++;
+ }
+ }
+ } catch (Exception e) {
+ Log.v(TAG, e.toString());
+ } finally {
+ if (surface != null) {
+ Log.v(TAG, "releasing persistent surface");
+ surface.release();
+ surface = null;
+ }
+ }
+ assertTrue("testPersistentSurfaceRecording", noOfFailure == 0);
+ }
+
// Test recording from surface source with/without audio
public void testSurfaceRecordingTimeLapse() {
boolean success = false;
@@ -555,13 +627,16 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
// always set videoOnly=false, MediaRecorder should disable
// audio automatically with time lapse/slow motion
- success = recordVideoFromSurface(frameRate,
- k==0 ? MIN_VIDEO_FPS : HIGH_SPEED_FPS,
- 352, 288, codec,
+ int captureRate = k==0 ? MIN_VIDEO_FPS : HIGH_SPEED_FPS;
+ success = recordVideoFromSurface(
+ frameRate, captureRate, 352, 288, codec,
MediaRecorder.OutputFormat.THREE_GPP,
- filename, false /* videoOnly */);
+ filename, false /* videoOnly */, null);
if (success) {
success = validateVideo(filename, 352, 288);
+ if (success) {
+ success = validateMetadata(filename, captureRate);
+ }
}
if (!success) {
noOfFailure++;
@@ -569,6 +644,7 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
}
} catch (Exception e) {
Log.v(TAG, e.toString());
+ noOfFailure++;
}
assertTrue("testSurfaceRecordingTimeLapse", noOfFailure == 0);
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
index cc50c43..14c2619 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
@@ -20,8 +20,6 @@ import android.hardware.CameraInfo;
import android.hardware.ICamera;
import android.hardware.ICameraClient;
import android.hardware.ICameraServiceListener;
-import android.hardware.IProCameraCallbacks;
-import android.hardware.IProCameraUser;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
import android.hardware.camera2.impl.CameraMetadataNative;
@@ -181,30 +179,6 @@ public class CameraBinderTest extends AndroidTestCase {
}
}
- static class DummyProCameraCallbacks extends DummyBase implements IProCameraCallbacks {
- }
-
- @SmallTest
- public void testConnectPro() throws Exception {
- for (int cameraId = 0; cameraId < mUtils.getGuessedNumCameras(); ++cameraId) {
-
- IProCameraCallbacks dummyCallbacks = new DummyProCameraCallbacks();
-
- String clientPackageName = getContext().getPackageName();
-
- BinderHolder holder = new BinderHolder();
- CameraBinderDecorator.newInstance(mUtils.getCameraService())
- .connectPro(dummyCallbacks, cameraId,
- clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
- IProCameraUser cameraUser = IProCameraUser.Stub.asInterface(holder.getBinder());
- assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
-
- Log.v(TAG, String.format("Camera %s connected", cameraId));
-
- cameraUser.disconnect();
- }
- }
-
@SmallTest
public void testConnectLegacy() throws Exception {
final int CAMERA_HAL_API_VERSION_1_0 = 0x100;
@@ -287,6 +261,16 @@ public class CameraBinderTest extends AndroidTestCase {
// TODO Auto-generated method stub
}
+
+ /*
+ * (non-Javadoc)
+ * @see android.hardware.camera2.ICameraDeviceCallbacks#onPrepared()
+ */
+ @Override
+ public void onPrepared(int streamId) throws RemoteException {
+ // TODO Auto-generated method stub
+
+ }
}
@SmallTest
@@ -316,6 +300,11 @@ public class CameraBinderTest extends AndroidTestCase {
throws RemoteException {
Log.v(TAG, String.format("Camera %d has status changed to 0x%x", cameraId, status));
}
+ public void onTorchStatusChanged(int status, String cameraId)
+ throws RemoteException {
+ Log.v(TAG, String.format("Camera %s has torch status changed to 0x%x",
+ cameraId, status));
+ }
}
/**
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
index 3cae19d..d71b44b 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
@@ -19,12 +19,14 @@ package com.android.mediaframeworktest.integration;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.utils.BinderHolder;
import android.media.Image;
import android.media.ImageReader;
@@ -67,6 +69,7 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
private CameraBinderTestUtils mUtils;
private ICameraDeviceCallbacks.Stub mMockCb;
private Surface mSurface;
+ private OutputConfiguration mOutputConfiguration;
private HandlerThread mHandlerThread;
private Handler mHandler;
ImageReader mImageReader;
@@ -130,6 +133,16 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
// TODO Auto-generated method stub
}
+
+ /*
+ * (non-Javadoc)
+ * @see android.hardware.camera2.ICameraDeviceCallbacks#onPrepared()
+ */
+ @Override
+ public void onPrepared(int streamId) throws RemoteException {
+ // TODO Auto-generated method stub
+
+ }
}
class IsMetadataNotEmpty extends ArgumentMatcher<CameraMetadataNative> {
@@ -147,6 +160,7 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
MAX_NUM_IMAGES);
mImageReader.setOnImageAvailableListener(new ImageDropperListener(), mHandler);
mSurface = mImageReader.getSurface();
+ mOutputConfiguration = new OutputConfiguration(mSurface);
}
private CaptureRequest.Builder createDefaultBuilder(boolean needStream) throws Exception {
@@ -157,12 +171,12 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
assertEquals(CameraBinderTestUtils.NO_ERROR, status);
assertFalse(metadata.isEmpty());
- CaptureRequest.Builder request = new CaptureRequest.Builder(metadata);
+ CaptureRequest.Builder request = new CaptureRequest.Builder(metadata, /*reprocess*/false,
+ CameraCaptureSession.SESSION_ID_NONE);
assertFalse(request.isEmpty());
assertFalse(metadata.isEmpty());
if (needStream) {
- int streamId = mCameraUser.createStream(/* ignored */10, /* ignored */20,
- /* ignored */30, mSurface);
+ int streamId = mCameraUser.createStream(mOutputConfiguration);
assertEquals(0, streamId);
request.addTarget(mSurface);
}
@@ -235,12 +249,11 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
@SmallTest
public void testCreateStream() throws Exception {
- int streamId = mCameraUser.createStream(/* ignored */10, /* ignored */20, /* ignored */30,
- mSurface);
+ int streamId = mCameraUser.createStream(mOutputConfiguration);
assertEquals(0, streamId);
assertEquals(CameraBinderTestUtils.ALREADY_EXISTS,
- mCameraUser.createStream(/* ignored */0, /* ignored */0, /* ignored */0, mSurface));
+ mCameraUser.createStream(mOutputConfiguration));
assertEquals(CameraBinderTestUtils.NO_ERROR, mCameraUser.deleteStream(streamId));
}
@@ -257,20 +270,19 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
public void testCreateStreamTwo() throws Exception {
// Create first stream
- int streamId = mCameraUser.createStream(/* ignored */0, /* ignored */0, /* ignored */0,
- mSurface);
+ int streamId = mCameraUser.createStream(mOutputConfiguration);
assertEquals(0, streamId);
assertEquals(CameraBinderTestUtils.ALREADY_EXISTS,
- mCameraUser.createStream(/* ignored */0, /* ignored */0, /* ignored */0, mSurface));
+ mCameraUser.createStream(mOutputConfiguration));
// Create second stream with a different surface.
SurfaceTexture surfaceTexture = new SurfaceTexture(/* ignored */0);
surfaceTexture.setDefaultBufferSize(640, 480);
Surface surface2 = new Surface(surfaceTexture);
+ OutputConfiguration output2 = new OutputConfiguration(surface2);
- int streamId2 = mCameraUser.createStream(/* ignored */0, /* ignored */0, /* ignored */0,
- surface2);
+ int streamId2 = mCameraUser.createStream(output2);
assertEquals(1, streamId2);
// Clean up streams
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
index 244b07f..c528165 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
@@ -432,7 +432,22 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
assertTrue("HEVC playback memory test", memoryResult);
}
- // Test case 4: Capture the memory usage after every 20 video only recorded
+ // Test case 4: Capture the memory usage after every 20 mpeg2 playback
+ @LargeTest
+ public void testMPEG2VideoPlaybackMemoryUsage() throws Exception {
+ boolean memoryResult = false;
+
+ mStartPid = getMediaserverPid();
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ mediaStressPlayback(MediaNames.VIDEO_MPEG2_AAC);
+ getMemoryWriteToLog(i);
+ writeProcmemInfo();
+ }
+ memoryResult = validateMemoryResult(mStartPid, mStartMemory, DECODER_LIMIT);
+ assertTrue("MPEG2 playback memory test", memoryResult);
+ }
+
+ // Test case 5: Capture the memory usage after every 20 video only recorded
@LargeTest
public void testH263RecordVideoOnlyMemoryUsage() throws Exception {
if (mCamcorderProfile != null) {
@@ -453,7 +468,7 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
}
}
- // Test case 5: Capture the memory usage after every 20 video only recorded
+ // Test case 6: Capture the memory usage after every 20 video only recorded
@LargeTest
public void testMpeg4RecordVideoOnlyMemoryUsage() throws Exception {
if (mCamcorderProfile != null) {
@@ -474,7 +489,7 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
}
}
- // Test case 6: Capture the memory usage after every 20 video and audio
+ // Test case 7: Capture the memory usage after every 20 video and audio
// recorded
@LargeTest
public void testRecordVideoAudioMemoryUsage() throws Exception {
@@ -496,7 +511,7 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
}
}
- // Test case 7: Capture the memory usage after every 20 audio only recorded
+ // Test case 8: Capture the memory usage after every 20 audio only recorded
@LargeTest
public void testRecordAudioOnlyMemoryUsage() throws Exception {
boolean memoryResult = false;
@@ -511,7 +526,7 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
assertTrue("audio record only memory test", memoryResult);
}
- // Test case 8: Capture the memory usage after every 20 camera preview
+ // Test case 9: Capture the memory usage after every 20 camera preview
@LargeTest
public void testCameraPreviewMemoryUsage() throws Exception {
boolean memoryResult = false;
diff --git a/media/tests/contents/media_api/video/MPEG2_1500_AAC_128.mp4 b/media/tests/contents/media_api/video/MPEG2_1500_AAC_128.mp4
new file mode 100644
index 0000000..33f66a0
--- /dev/null
+++ b/media/tests/contents/media_api/video/MPEG2_1500_AAC_128.mp4
Binary files differ