summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
Diffstat (limited to 'media')
-rw-r--r--media/java/android/media/AudioDeviceInfo.java (renamed from media/java/android/media/AudioDevice.java)97
-rw-r--r--media/java/android/media/AudioDevicesManager.java348
-rw-r--r--media/java/android/media/AudioFormat.java174
-rw-r--r--media/java/android/media/AudioManager.java138
-rw-r--r--media/java/android/media/AudioPortEventHandler.java14
-rw-r--r--media/java/android/media/AudioRecord.java617
-rw-r--r--media/java/android/media/AudioSystem.java2
-rw-r--r--media/java/android/media/AudioTrack.java776
-rw-r--r--media/java/android/media/DataSource.java43
-rw-r--r--media/java/android/media/Image.java167
-rw-r--r--media/java/android/media/ImageReader.java258
-rw-r--r--media/java/android/media/ImageUtils.java120
-rw-r--r--media/java/android/media/ImageWriter.java805
-rw-r--r--media/java/android/media/MediaCodec.java539
-rw-r--r--media/java/android/media/MediaCodecInfo.java266
-rw-r--r--media/java/android/media/MediaCodecList.java9
-rw-r--r--media/java/android/media/MediaCrypto.java28
-rw-r--r--media/java/android/media/MediaCryptoException.java8
-rw-r--r--media/java/android/media/MediaDataSource.java62
-rw-r--r--media/java/android/media/MediaDescription.java32
-rw-r--r--media/java/android/media/MediaDrm.java560
-rw-r--r--media/java/android/media/MediaExtractor.java74
-rw-r--r--media/java/android/media/MediaFormat.java57
-rw-r--r--media/java/android/media/MediaHTTPConnection.java8
-rw-r--r--media/java/android/media/MediaMetadataRetriever.java21
-rw-r--r--media/java/android/media/MediaMuxer.java41
-rw-r--r--media/java/android/media/MediaPlayer.java259
-rw-r--r--media/java/android/media/MediaRecorder.java27
-rw-r--r--media/java/android/media/MediaRouter.java18
-rw-r--r--media/java/android/media/MediaScanner.java11
-rw-r--r--media/java/android/media/MediaSync.java733
-rw-r--r--media/java/android/media/MediaTimestamp.java71
-rw-r--r--media/java/android/media/OnAudioDeviceConnectionListener.java17
-rw-r--r--media/java/android/media/OnAudioRecordRoutingListener.java29
-rw-r--r--media/java/android/media/OnAudioTrackRoutingListener.java29
-rw-r--r--media/java/android/media/PlaybackSettings.java202
-rw-r--r--media/java/android/media/SoundPool.java591
-rw-r--r--media/java/android/media/SyncSettings.java282
-rw-r--r--media/java/android/media/TimedMetaData.java71
-rw-r--r--media/java/android/media/VolumePolicy.java19
-rw-r--r--media/java/android/media/audiofx/AcousticEchoCanceler.java3
-rw-r--r--media/java/android/media/audiofx/AutomaticGainControl.java3
-rw-r--r--media/java/android/media/audiofx/NoiseSuppressor.java3
-rw-r--r--media/java/android/media/audiofx/Virtualizer.java46
-rw-r--r--media/java/android/media/audiopolicy/AudioMix.java36
-rw-r--r--media/java/android/media/midi/IMidiDeviceServer.aidl3
-rw-r--r--media/java/android/media/midi/MidiDevice.java14
-rw-r--r--media/java/android/media/midi/MidiDeviceInfo.java44
-rw-r--r--media/java/android/media/midi/MidiDeviceServer.java11
-rw-r--r--media/java/android/media/midi/MidiDeviceService.java2
-rw-r--r--media/java/android/media/midi/MidiDispatcher.java84
-rw-r--r--media/java/android/media/midi/MidiInputPort.java13
-rw-r--r--media/java/android/media/midi/MidiManager.java99
-rw-r--r--media/java/android/media/midi/MidiOutputPort.java26
-rw-r--r--media/java/android/media/midi/MidiPortImpl.java70
-rw-r--r--media/java/android/media/midi/MidiReceiver.java7
-rw-r--r--media/java/android/media/midi/package.html324
-rw-r--r--media/java/android/media/session/ISessionCallback.aidl5
-rw-r--r--media/java/android/media/session/ISessionController.aidl5
-rw-r--r--media/java/android/media/session/MediaController.java27
-rw-r--r--media/java/android/media/session/MediaSession.java23
-rw-r--r--media/java/android/media/session/PlaybackState.java17
-rw-r--r--media/java/android/media/tv/ITvInputClient.aidl3
-rw-r--r--media/java/android/media/tv/ITvInputManager.aidl6
-rw-r--r--media/java/android/media/tv/ITvInputSession.aidl6
-rw-r--r--media/java/android/media/tv/ITvInputSessionCallback.aidl3
-rw-r--r--media/java/android/media/tv/ITvInputSessionWrapper.java53
-rw-r--r--media/java/android/media/tv/TvContentRating.java37
-rw-r--r--media/java/android/media/tv/TvContract.java573
-rw-r--r--media/java/android/media/tv/TvInputInfo.java49
-rw-r--r--media/java/android/media/tv/TvInputManager.java513
-rw-r--r--media/java/android/media/tv/TvInputService.java449
-rw-r--r--media/java/android/media/tv/TvTrackInfo.java49
-rw-r--r--media/java/android/media/tv/TvView.java263
-rw-r--r--media/java/android/mtp/MtpStorage.java4
-rw-r--r--media/jni/Android.mk13
-rw-r--r--media/jni/android_media_ImageReader.cpp413
-rw-r--r--media/jni/android_media_ImageWriter.cpp1083
-rw-r--r--media/jni/android_media_MediaCodec.cpp21
-rw-r--r--media/jni/android_media_MediaCodecList.cpp25
-rw-r--r--media/jni/android_media_MediaCrypto.cpp43
-rw-r--r--media/jni/android_media_MediaDataSource.cpp148
-rw-r--r--media/jni/android_media_MediaDataSource.h73
-rw-r--r--media/jni/android_media_MediaDrm.cpp72
-rw-r--r--media/jni/android_media_MediaExtractor.cpp74
-rw-r--r--media/jni/android_media_MediaHTTPConnection.cpp1
-rw-r--r--media/jni/android_media_MediaMetadataRetriever.cpp19
-rw-r--r--media/jni/android_media_MediaPlayer.cpp148
-rw-r--r--media/jni/android_media_MediaSync.cpp431
-rw-r--r--media/jni/android_media_MediaSync.h59
-rw-r--r--media/jni/android_media_PlaybackSettings.h120
-rw-r--r--media/jni/android_media_SyncSettings.cpp91
-rw-r--r--media/jni/android_media_SyncSettings.h66
-rw-r--r--media/jni/soundpool/Android.mk2
-rw-r--r--media/jni/soundpool/SoundPool.cpp14
-rw-r--r--media/jni/soundpool/android_media_SoundPool.cpp (renamed from media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp)90
-rw-r--r--media/packages/BluetoothMidiService/Android.mk11
-rw-r--r--media/packages/BluetoothMidiService/AndroidManifest.xml17
-rw-r--r--media/packages/BluetoothMidiService/res/values/strings.xml19
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java276
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java61
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java115
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java157
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java109
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java33
-rw-r--r--media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java41
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java34
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java36
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java30
109 files changed, 12239 insertions, 2202 deletions
diff --git a/media/java/android/media/AudioDevice.java b/media/java/android/media/AudioDeviceInfo.java
index df4d60d..d58b1d1 100644
--- a/media/java/android/media/AudioDevice.java
+++ b/media/java/android/media/AudioDeviceInfo.java
@@ -20,9 +20,8 @@ import android.util.SparseIntArray;
/**
* Class to provide information about the audio devices.
- * @hide
*/
-public class AudioDevice {
+public class AudioDeviceInfo {
/**
* A device type associated with an unknown or uninitialized device.
@@ -42,7 +41,7 @@ public class AudioDevice {
*/
public static final int TYPE_WIRED_HEADSET = 3;
/**
- * A device type describing a pair of wired headphones .
+ * A device type describing a pair of wired headphones.
*/
public static final int TYPE_WIRED_HEADPHONES = 4;
/**
@@ -54,7 +53,7 @@ public class AudioDevice {
*/
public static final int TYPE_LINE_DIGITAL = 6;
/**
- * A device type describing a Bluetooth device typically used for telephony .
+ * A device type describing a Bluetooth device typically used for telephony.
*/
public static final int TYPE_BLUETOOTH_SCO = 7;
/**
@@ -106,46 +105,92 @@ public class AudioDevice {
*/
public static final int TYPE_AUX_LINE = 19;
- AudioDevicePortConfig mConfig;
+ private final AudioDevicePort mPort;
- AudioDevice(AudioDevicePortConfig config) {
- mConfig = new AudioDevicePortConfig(config);
+ AudioDeviceInfo(AudioDevicePort port) {
+ mPort = port;
}
/**
* @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return The internal device ID.
*/
- public boolean isInputDevice() {
- return (mConfig.port().role() == AudioPort.ROLE_SOURCE);
+ public int getId() {
+ return mPort.handle().id();
}
/**
- * @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return The human-readable name of the audio device.
*/
- public boolean isOutputDevice() {
- return (mConfig.port().role() == AudioPort.ROLE_SINK);
+ public String getName() {
+ return mPort.name();
}
/**
- * @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return The "address" string of the device. This generally contains device-specific
+ * parameters.
*/
- public int getDeviceType() {
- return INT_TO_EXT_DEVICE_MAPPING.get(mConfig.port().type(), TYPE_UNKNOWN);
+ // TODO Is there a compelling reason to expose this?
+ public String getAddress() {
+ return mPort.address();
+ }
+
+ /**
+     * @return true if the audio device is a source for audio data (i.e. an input).
+ */
+ public boolean isSource() {
+ return mPort.role() == AudioPort.ROLE_SOURCE;
}
/**
- * @hide
- * CANDIDATE FOR PUBLIC API
- * @return
+ * @return true if the audio device is a sink for audio data (i.e. an output).
*/
- public String getAddress() {
- return mConfig.port().address();
+ public boolean isSink() {
+ return mPort.role() == AudioPort.ROLE_SINK;
+ }
+
+ /**
+ * @return An array of sample rates supported by the audio device.
+ */
+ public int[] getSampleRates() {
+ return mPort.samplingRates();
+ }
+
+ /**
+ * @return An array of channel masks supported by the audio device (defined in
+ * AudioFormat.java).
+ */
+ public int[] getChannelMasks() {
+ return mPort.channelMasks();
+ }
+
+ /**
+ * @return An array of channel counts supported by the audio device.
+ */
+ public int[] getChannelCounts() {
+ int[] masks = getChannelMasks();
+ int[] counts = new int[masks.length];
+ for (int mask_index = 0; mask_index < masks.length; mask_index++) {
+ counts[mask_index] = isSink()
+ ? AudioFormat.channelCountFromOutChannelMask(masks[mask_index])
+ : AudioFormat.channelCountFromInChannelMask(masks[mask_index]);
+ }
+ return counts;
+ }
+
+ /**
+ * @return An array of audio format IDs supported by the audio device (defined in
+ * AudioFormat.java)
+ */
+ public int[] getFormats() {
+ return mPort.formats();
+ }
+
+ /**
+ * @return The device type identifier of the audio device (i.e. TYPE_BUILTIN_SPEAKER).
+ */
+ public int getType() {
+ return INT_TO_EXT_DEVICE_MAPPING.get(mPort.type(), TYPE_UNKNOWN);
}
/** @hide */
diff --git a/media/java/android/media/AudioDevicesManager.java b/media/java/android/media/AudioDevicesManager.java
index ee11eef..ca238d7 100644
--- a/media/java/android/media/AudioDevicesManager.java
+++ b/media/java/android/media/AudioDevicesManager.java
@@ -17,24 +17,54 @@
package android.media;
import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.ArrayMap;
+import android.util.Pair;
import android.util.Slog;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.concurrent.CopyOnWriteArrayList;
import java.util.HashMap;
import java.util.Iterator;
-/** @hide
- * API candidate
+/**
+ * AudioDevicesManager implements the Android Media Audio device enumeration and notification
+ * functionality. This functionality is in two comlementary parts.
+ * <ol>
+ * <li>{@link AudioDevicesManager#listDevices(int)} gets the list of current audio devices
+ * </li>
+ * <li>{@link AudioDevicesManager#addOnAudioDeviceConnectionListener(OnAudioDeviceConnectionListener, android.os.Handler)}
+ * provides a mechanism for applications to be informed of audio device connect/disconnect events.
+ * </li>
+ * </ol>
*/
public class AudioDevicesManager {
+
private static String TAG = "AudioDevicesManager";
- private static boolean DEBUG = true;
+
+ private static boolean DEBUG = false;
private AudioManager mAudioManager = null;
+
private OnAmPortUpdateListener mPortListener = null;
- /*
- * Enum/Selection API
+ /**
+ * The message sent to apps when the contents of the device list changes if they provide
+     * a {@link Handler} object to addOnAudioDeviceConnectionListener().
+ */
+ private final static int MSG_DEVICES_LIST_CHANGE = 0;
+
+ private ArrayMap<OnAudioDeviceConnectionListener, NativeEventHandlerDelegate>
+ mDeviceConnectionListeners =
+ new ArrayMap<OnAudioDeviceConnectionListener, NativeEventHandlerDelegate>();
+
+ /**
+ * @hide
+ * The AudioDevicesManager class is used to enumerate the physical audio devices connected
+ * to the system. See also {@link AudioDeviceInfo}.
*/
public AudioDevicesManager(Context context) {
mAudioManager = (AudioManager)context.getSystemService(Context.AUDIO_SERVICE);
@@ -42,214 +72,120 @@ public class AudioDevicesManager {
mAudioManager.registerAudioPortUpdateListener(mPortListener);
}
- /** @hide
- * API candidate
+ /**
+ * Specifies to the {@link AudioDevicesManager#listDevices(int)} method to include
+ * source (i.e. input) audio devices.
*/
- //TODO Merge this class into android.media.AudioDevice
- public class AudioDeviceInfo {
- private AudioDevicePort mPort = null;
-
- /** @hide */
- /* package */ AudioDeviceInfo(AudioDevicePort port) {
- mPort = port;
- }
-
- public int getId() { return mPort.handle().id(); }
-
- public String getName() { return mPort.name(); }
-
- public int getType() {
- return mPort.type();
- }
-
- public String getAddress() {
- return mPort.address();
- }
-
- public int getRole() { return mPort.role(); }
-
- public int[] getSampleRates() { return mPort.samplingRates(); }
-
- public int[] getChannelMasks() { return mPort.channelMasks(); }
-
- public int[] getChannelCounts() {
- int[] masks = getChannelMasks();
- int[] counts = new int[masks.length];
- for (int mask_index = 0; mask_index < masks.length; mask_index++) {
- counts[mask_index] = getRole() == AudioPort.ROLE_SINK
- ? AudioFormat.channelCountFromOutChannelMask(masks[mask_index])
- : AudioFormat.channelCountFromInChannelMask(masks[mask_index]);
- }
- return counts;
- }
-
- /* The format IDs are in AudioFormat.java */
- public int[] getFormats() { return mPort.formats(); }
+ public static final int LIST_DEVICES_INPUTS = 0x0001;
- public String toString() { return "" + getId() + " - " + getName(); }
- }
-
- /** @hide */
- public static final int LIST_DEVICES_OUTPUTS = 0x0001;
- /** @hide */
- public static final int LIST_DEVICES_INPUTS = 0x0002;
- /** @hide */
- public static final int LIST_DEVICES_BUILTIN = 0x0004;
- /** @hide */
- public static final int LIST_DEVICES_USB = 0x0008;
- // TODO implement the semantics for these.
- /** @hide */
- public static final int LIST_DEVICES_WIRED = 0x0010;
- /** @hide */
- public static final int LIST_DEVICES_UNWIRED = 0x0020;
+ /**
+ * Specifies to the {@link AudioDevicesManager#listDevices(int)} method to include
+ * sink (i.e. output) audio devices.
+ */
+ public static final int LIST_DEVICES_OUTPUTS = 0x0002;
- /** @hide */
+ /**
+ * Specifies to the {@link AudioDevicesManager#listDevices(int)} method to include both
+ * source and sink devices.
+ */
public static final int LIST_DEVICES_ALL = LIST_DEVICES_OUTPUTS | LIST_DEVICES_INPUTS;
+ /**
+ * Determines if a given AudioDevicePort meets the specified filter criteria.
+ * @param port The port to test.
+ * @param flags A set of bitflags specifying the criteria to test.
+ * @see {@link LIST_DEVICES_OUTPUTS} and {@link LIST_DEVICES_INPUTS}
+ **/
private boolean checkFlags(AudioDevicePort port, int flags) {
- // Inputs / Outputs
- boolean passed =
- port.role() == AudioPort.ROLE_SINK && (flags & LIST_DEVICES_OUTPUTS) != 0 ||
- port.role() == AudioPort.ROLE_SOURCE && (flags & LIST_DEVICES_INPUTS) != 0;
-
- // USB
- if (passed && (flags & LIST_DEVICES_USB) != 0) {
- int role = port.role();
- int type = port.type();
- Slog.i(TAG, " role:" + role + " type:0x" + Integer.toHexString(type));
- passed =
- (role == AudioPort.ROLE_SINK && (type & AudioSystem.DEVICE_OUT_ALL_USB) != 0) ||
- (role == AudioPort.ROLE_SOURCE && (type & AudioSystem.DEVICE_IN_ALL_USB) != 0);
- }
-
- return passed;
+ return port.role() == AudioPort.ROLE_SINK && (flags & LIST_DEVICES_OUTPUTS) != 0 ||
+ port.role() == AudioPort.ROLE_SOURCE && (flags & LIST_DEVICES_INPUTS) != 0;
}
- /** @hide */
- public ArrayList<AudioDeviceInfo> listDevices(int flags) {
- Slog.i(TAG, "AudioManager.listDevices(" + Integer.toHexString(flags) + ")");
-
+ /**
+ * Generates a list of AudioDeviceInfo objects corresponding to the audio devices currently
+ * connected to the system and meeting the criteria specified in the <code>flags</code>
+ * parameter.
+ * @param flags A set of bitflags specifying the criteria to test.
+ * @see {@link LIST_DEVICES_OUTPUTS}, {@link LIST_DEVICES_INPUTS} and {@link LIST_DEVICES_ALL}.
+ * @return A (possibly zero-length) array of AudioDeviceInfo objects.
+ */
+ public AudioDeviceInfo[] listDevices(int flags) {
ArrayList<AudioDevicePort> ports = new ArrayList<AudioDevicePort>();
int status = mAudioManager.listAudioDevicePorts(ports);
-
- Slog.i(TAG, " status:" + status + " numPorts:" + ports.size());
-
- ArrayList<AudioDeviceInfo> deviceList = new ArrayList<AudioDeviceInfo>();
-
- if (status == AudioManager.SUCCESS) {
- deviceList = new ArrayList<AudioDeviceInfo>();
- for (AudioDevicePort port : ports) {
- if (checkFlags(port, flags)) {
- deviceList.add(new AudioDeviceInfo(port));
- }
- }
+ if (status != AudioManager.SUCCESS) {
+ // fail and bail!
+ return new AudioDeviceInfo[0];
}
- return deviceList;
- }
- private ArrayList<OnAudioDeviceConnectionListener> mDeviceConnectionListeners =
- new ArrayList<OnAudioDeviceConnectionListener>();
-
- private HashMap<Integer, AudioPort> mCurrentPortlist =
- new HashMap<Integer, AudioPort>();
-
- private ArrayList<AudioDeviceInfo> calcAddedDevices(AudioPort[] portList) {
- ArrayList<AudioDeviceInfo> addedDevices = new ArrayList<AudioDeviceInfo>();
- synchronized(mCurrentPortlist) {
- for(int portIndex = 0; portIndex < portList.length; portIndex++) {
- if (portList[portIndex] instanceof AudioDevicePort) {
- if (!mCurrentPortlist.containsKey(portList[portIndex].handle().id())) {
- addedDevices.add(new AudioDeviceInfo((AudioDevicePort)portList[portIndex]));
- }
- }
+ // figure out how many AudioDeviceInfo we need space for
+ int numRecs = 0;
+ for (AudioDevicePort port : ports) {
+ if (checkFlags(port, flags)) {
+ numRecs++;
}
}
- return addedDevices;
- }
- private boolean hasPortId(AudioPort[] portList, int id) {
- for(int portIndex = 0; portIndex < portList.length; portIndex++) {
- if (portList[portIndex].handle().id() == id) {
- return true;
+ // Now load them up
+ AudioDeviceInfo[] deviceList = new AudioDeviceInfo[numRecs];
+ int slot = 0;
+ for (AudioDevicePort port : ports) {
+ if (checkFlags(port, flags)) {
+ deviceList[slot++] = new AudioDeviceInfo(port);
}
}
- return false;
- }
-
- private ArrayList<AudioDeviceInfo> calcRemovedDevices(AudioPort[] portList) {
- ArrayList<AudioDeviceInfo> removedDevices = new ArrayList<AudioDeviceInfo>();
- synchronized (mCurrentPortlist) {
- Iterator it = mCurrentPortlist.entrySet().iterator();
- while (it.hasNext()) {
- HashMap.Entry pairs = (HashMap.Entry)it.next();
- if (pairs.getValue() instanceof AudioDevicePort) {
- if (!hasPortId(portList, ((Integer)pairs.getKey()).intValue())) {
- removedDevices.add(new AudioDeviceInfo((AudioDevicePort)pairs.getValue()));
- }
- }
- }
- }
- return removedDevices;
+ return deviceList;
}
- private void buildCurrentDevicesList(AudioPort[] portList) {
- synchronized (mCurrentPortlist) {
- mCurrentPortlist.clear();
- for (int portIndex = 0; portIndex < portList.length; portIndex++) {
- if (portList[portIndex] instanceof AudioDevicePort) {
- mCurrentPortlist.put(portList[portIndex].handle().id(),
- (AudioDevicePort)portList[portIndex]);
- }
+ /**
+ * Adds an {@link OnAudioDeviceConnectionListener} to receive notifications of changes
+ * to the set of connected audio devices.
+ */
+ public void addOnAudioDeviceConnectionListener(OnAudioDeviceConnectionListener listener,
+ android.os.Handler handler) {
+ if (listener != null && !mDeviceConnectionListeners.containsKey(listener)) {
+ synchronized (mDeviceConnectionListeners) {
+ mDeviceConnectionListeners.put(
+ listener, new NativeEventHandlerDelegate(listener, handler));
}
}
}
- /** @hide */
- public void addDeviceConnectionListener(OnAudioDeviceConnectionListener listener) {
+ /**
+ * Removes an {@link OnAudioDeviceConnectionListener} which has been previously registered
+ * to receive notifications of changes to the set of connected audio devices.
+ */
+ public void removeOnAudioDeviceConnectionListener(OnAudioDeviceConnectionListener listener) {
synchronized (mDeviceConnectionListeners) {
- mDeviceConnectionListeners.add(listener);
+ if (mDeviceConnectionListeners.containsKey(listener)) {
+ mDeviceConnectionListeners.remove(listener);
+ }
}
}
- /** @hide */
- public void removeDeviceConnectionListener(OnAudioDeviceConnectionListener listener) {
+ /**
+ * Sends device list change notification to all listeners.
+ */
+ private void broadcastDeviceListChange() {
+ Collection<NativeEventHandlerDelegate> values;
synchronized (mDeviceConnectionListeners) {
- mDeviceConnectionListeners.remove(listener);
+ values = mDeviceConnectionListeners.values();
+ }
+ for(NativeEventHandlerDelegate delegate : values) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ handler.sendEmptyMessage(MSG_DEVICES_LIST_CHANGE);
+ }
}
}
/**
- * @hide
+ * Handles Port list update notifications from the AudioManager
*/
private class OnAmPortUpdateListener implements AudioManager.OnAudioPortUpdateListener {
static final String TAG = "OnAmPortUpdateListener";
public void onAudioPortListUpdate(AudioPort[] portList) {
- Slog.i(TAG, "onAudioPortListUpdate() " + portList.length + " ports.");
- ArrayList<AudioDeviceInfo> addedDevices = calcAddedDevices(portList);
- ArrayList<AudioDeviceInfo> removedDevices = calcRemovedDevices(portList);
-
- ArrayList<OnAudioDeviceConnectionListener> listeners = null;
- synchronized (mDeviceConnectionListeners) {
- listeners =
- new ArrayList<OnAudioDeviceConnectionListener>(mDeviceConnectionListeners);
- }
-
- // Connect
- if (addedDevices.size() != 0) {
- for (OnAudioDeviceConnectionListener listener : listeners) {
- listener.onConnect(addedDevices);
- }
- }
-
- // Disconnect?
- if (removedDevices.size() != 0) {
- for (OnAudioDeviceConnectionListener listener : listeners) {
- listener.onDisconnect(removedDevices);
- }
- }
-
- buildCurrentDevicesList(portList);
+ broadcastDeviceListChange();
}
/**
@@ -257,14 +193,70 @@ public class AudioDevicesManager {
* @param patchList the updated list of audio patches
*/
public void onAudioPatchListUpdate(AudioPatch[] patchList) {
- Slog.i(TAG, "onAudioPatchListUpdate() " + patchList.length + " patches.");
+ if (DEBUG) {
+ Slog.d(TAG, "onAudioPatchListUpdate() " + patchList.length + " patches.");
+ }
}
/**
* Callback method called when the mediaserver dies
*/
public void onServiceDied() {
- Slog.i(TAG, "onServiceDied()");
+ if (DEBUG) {
+ Slog.i(TAG, "onServiceDied()");
+ }
+
+ broadcastDeviceListChange();
+ }
+ }
+
+ //---------------------------------------------------------
+ // Inner classes
+ //--------------------
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread.
+ */
+ private class NativeEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativeEventHandlerDelegate(final OnAudioDeviceConnectionListener listener,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no given handler, use the looper the addListener call was called in
+ looper = Looper.getMainLooper();
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case MSG_DEVICES_LIST_CHANGE:
+ // call the OnAudioDeviceConnectionListener
+ if (listener != null) {
+ listener.onAudioDeviceConnection();
+ }
+ break;
+ default:
+ Slog.e(TAG, "Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
}
}
}
diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java
index 9a0266d..a7e092f 100644
--- a/media/java/android/media/AudioFormat.java
+++ b/media/java/android/media/AudioFormat.java
@@ -17,7 +17,7 @@
package android.media;
import android.annotation.IntDef;
-
+import android.annotation.NonNull;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -38,6 +38,7 @@ public class AudioFormat {
public static final int ENCODING_DEFAULT = 1;
// These values must be kept in sync with core/jni/android_media_AudioFormat.h
+ // Also sync av/services/audiopolicy/managerdefault/ConfigParsingUtils.h
/** Audio data format: PCM 16 bit per sample. Guaranteed to be supported by devices. */
public static final int ENCODING_PCM_16BIT = 2;
/** Audio data format: PCM 8 bit per sample. Not guaranteed to be supported by devices. */
@@ -48,18 +49,22 @@ public class AudioFormat {
public static final int ENCODING_AC3 = 5;
/** Audio data format: E-AC-3 compressed */
public static final int ENCODING_E_AC3 = 6;
+ /** Audio data format: DTS compressed */
+ public static final int ENCODING_DTS = 7;
+ /** Audio data format: DTS HD compressed */
+ public static final int ENCODING_DTS_HD = 8;
/** Invalid audio channel configuration */
- /** @deprecated use CHANNEL_INVALID instead */
+ /** @deprecated Use {@link #CHANNEL_INVALID} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0;
/** Default audio channel configuration */
- /** @deprecated use CHANNEL_OUT_DEFAULT or CHANNEL_IN_DEFAULT instead */
+ /** @deprecated Use {@link #CHANNEL_OUT_DEFAULT} or {@link #CHANNEL_IN_DEFAULT} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1;
/** Mono audio configuration */
- /** @deprecated use CHANNEL_OUT_MONO or CHANNEL_IN_MONO instead */
+ /** @deprecated Use {@link #CHANNEL_OUT_MONO} or {@link #CHANNEL_IN_MONO} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2;
/** Stereo (2 channel) audio configuration */
- /** @deprecated use CHANNEL_OUT_STEREO or CHANNEL_IN_STEREO instead */
+ /** @deprecated Use {@link #CHANNEL_OUT_STEREO} or {@link #CHANNEL_IN_STEREO} instead. */
@Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3;
/** Invalid audio channel mask */
@@ -68,7 +73,7 @@ public class AudioFormat {
public static final int CHANNEL_OUT_DEFAULT = 1;
// Output channel mask definitions below are translated to the native values defined in
- // in /system/core/include/system/audio.h in the JNI code of AudioTrack
+ // in /system/media/audio/include/system/audio.h in the JNI code of AudioTrack
public static final int CHANNEL_OUT_FRONT_LEFT = 0x4;
public static final int CHANNEL_OUT_FRONT_RIGHT = 0x8;
public static final int CHANNEL_OUT_FRONT_CENTER = 0x10;
@@ -112,12 +117,11 @@ public class AudioFormat {
public static final int CHANNEL_OUT_5POINT1_SIDE = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY |
CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT);
- // TODO does this need an @deprecated ?
- // different from AUDIO_CHANNEL_OUT_7POINT1
- public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ // different from AUDIO_CHANNEL_OUT_7POINT1 used internally, and not accepted by AudioRecord.
+ /** @deprecated Not the typical 7.1 surround configuration. Use {@link #CHANNEL_OUT_7POINT1_SURROUND} instead. */
+ @Deprecated public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT |
CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER);
- /** @hide */
// matches AUDIO_CHANNEL_OUT_7POINT1
public static final int CHANNEL_OUT_7POINT1_SURROUND = (
CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_FRONT_RIGHT |
@@ -235,6 +239,8 @@ public class AudioFormat {
case ENCODING_PCM_FLOAT:
case ENCODING_AC3:
case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
return true;
default:
return false;
@@ -252,6 +258,8 @@ public class AudioFormat {
return true;
case ENCODING_AC3:
case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
return false;
case ENCODING_INVALID:
default:
@@ -277,13 +285,15 @@ public class AudioFormat {
*/
// Update sound trigger JNI in core/jni/android_hardware_SoundTrigger.cpp when modifying this
// constructor
- private AudioFormat(int encoding, int sampleRate, int channelMask) {
+ private AudioFormat(int encoding, int sampleRate, int channelMask, int channelIndexMask) {
mEncoding = encoding;
mSampleRate = sampleRate;
mChannelMask = channelMask;
+ mChannelIndexMask = channelIndexMask;
mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_ENCODING |
AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE |
- AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK;
+ AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK |
+ AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
}
/** @hide */
@@ -294,10 +304,13 @@ public class AudioFormat {
public final static int AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE = 0x1 << 1;
/** @hide */
public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK = 0x1 << 2;
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK = 0x1 << 3;
private int mEncoding;
private int mSampleRate;
private int mChannelMask;
+ private int mChannelIndexMask;
private int mPropertySetMask;
/**
@@ -336,6 +349,34 @@ public class AudioFormat {
return mChannelMask;
}
+ /**
+ * Return the channel index mask.
+ * @return one of the values that can be set in {@link Builder#setChannelIndexMask(int)} or
+ * {@link AudioFormat#CHANNEL_INVALID} if not set or an invalid mask was used.
+ */
+ public int getChannelIndexMask() {
+ if ((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) == 0) {
+ return CHANNEL_INVALID;
+ }
+ return mChannelIndexMask;
+ }
+
+ /**
+ * Return the channel count.
+ * @return the channel count derived from the channel position mask or the channel index mask.
+ * Zero is returned if both the channel position mask and the channel index mask are not set.
+ */
+ public int getChannelCount() {
+ final int channelIndexCount = Integer.bitCount(getChannelIndexMask());
+ int channelCount = channelCountFromOutChannelMask(getChannelMask());
+ if (channelCount == 0) {
+ channelCount = channelIndexCount;
+ } else if (channelCount != channelIndexCount && channelIndexCount != 0) {
+ channelCount = 0; // position and index channel count mismatch
+ }
+ return channelCount;
+ }
+
/** @hide */
public int getPropertySetMask() {
return mPropertySetMask;
@@ -359,6 +400,7 @@ public class AudioFormat {
private int mEncoding = ENCODING_INVALID;
private int mSampleRate = 0;
private int mChannelMask = CHANNEL_INVALID;
+ private int mChannelIndexMask = 0;
private int mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_NONE;
/**
@@ -375,6 +417,7 @@ public class AudioFormat {
mEncoding = af.mEncoding;
mSampleRate = af.mSampleRate;
mChannelMask = af.mChannelMask;
+ mChannelIndexMask = af.mChannelIndexMask;
mPropertySetMask = af.mPropertySetMask;
}
@@ -388,6 +431,7 @@ public class AudioFormat {
af.mEncoding = mEncoding;
af.mSampleRate = mSampleRate;
af.mChannelMask = mChannelMask;
+ af.mChannelIndexMask = mChannelIndexMask;
af.mPropertySetMask = mPropertySetMask;
return af;
}
@@ -400,6 +444,8 @@ public class AudioFormat {
* {@link AudioFormat#ENCODING_PCM_FLOAT},
* {@link AudioFormat#ENCODING_AC3},
* {@link AudioFormat#ENCODING_E_AC3}.
+ * {@link AudioFormat#ENCODING_DTS},
+ * {@link AudioFormat#ENCODING_DTS_HD}.
* @return the same Builder instance.
* @throws java.lang.IllegalArgumentException
*/
@@ -413,6 +459,8 @@ public class AudioFormat {
case ENCODING_PCM_FLOAT:
case ENCODING_AC3:
case ENCODING_E_AC3:
+ case ENCODING_DTS:
+ case ENCODING_DTS_HD:
mEncoding = encoding;
break;
case ENCODING_INVALID:
@@ -424,29 +472,104 @@ public class AudioFormat {
}
/**
- * Sets the channel mask.
+ * Sets the channel position mask.
+ * The channel position mask specifies the association between audio samples in a frame
+ * with named endpoint channels. The samples in the frame correspond to the
+ * named set bits in the channel position mask, in ascending bit order.
+ * See {@link #setChannelIndexMask(int)} to specify channels
+ * based on endpoint numbered channels.
* @param channelMask describes the configuration of the audio channels.
- * <p>For output, the mask should be a combination of
+ * <p> For output, the channelMask can be an OR-ed combination of
+ * channel position masks, e.g.
* {@link AudioFormat#CHANNEL_OUT_FRONT_LEFT},
- * {@link AudioFormat#CHANNEL_OUT_FRONT_CENTER},
* {@link AudioFormat#CHANNEL_OUT_FRONT_RIGHT},
- * {@link AudioFormat#CHANNEL_OUT_SIDE_LEFT},
- * {@link AudioFormat#CHANNEL_OUT_SIDE_RIGHT},
+ * {@link AudioFormat#CHANNEL_OUT_FRONT_CENTER},
+ * {@link AudioFormat#CHANNEL_OUT_LOW_FREQUENCY},
* {@link AudioFormat#CHANNEL_OUT_BACK_LEFT},
- * {@link AudioFormat#CHANNEL_OUT_BACK_RIGHT}.
- * <p>for input, the mask should be {@link AudioFormat#CHANNEL_IN_MONO} or
+ * {@link AudioFormat#CHANNEL_OUT_BACK_RIGHT},
+ * {@link AudioFormat#CHANNEL_OUT_BACK_CENTER},
+ * {@link AudioFormat#CHANNEL_OUT_SIDE_LEFT},
+ * {@link AudioFormat#CHANNEL_OUT_SIDE_RIGHT}.
+ * <p> For a valid {@link AudioTrack} channel position mask,
+ * the following conditions apply:
+ * <br> (1) at most eight channel positions may be used;
+ * <br> (2) right/left pairs should be matched.
+ * <p> For input or {@link AudioRecord}, the mask should be
+ * {@link AudioFormat#CHANNEL_IN_MONO} or
* {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is
* guaranteed to work on all devices.
- * @return the same Builder instance.
+ * @return the same <code>Builder</code> instance.
+ * @throws IllegalArgumentException if the channel mask is invalid or
+ * if both channel index mask and channel position mask
+ * are specified but do not have the same channel count.
*/
- public Builder setChannelMask(int channelMask) {
- // only validated when used, with input or output context
+ public @NonNull Builder setChannelMask(int channelMask) throws IllegalArgumentException {
+ if (channelMask == 0) {
+ throw new IllegalArgumentException("Invalid zero channel mask");
+ } else if (/* channelMask != 0 && */ mChannelIndexMask != 0 &&
+ Integer.bitCount(channelMask) != Integer.bitCount(mChannelIndexMask)) {
+ throw new IllegalArgumentException("Mismatched channel count for mask " +
+ Integer.toHexString(channelMask).toUpperCase());
+ }
mChannelMask = channelMask;
mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK;
return this;
}
/**
+ * Sets the channel index mask.
+ * A channel index mask specifies the association of audio samples in the frame
+ * with numbered endpoint channels. The i-th bit in the channel index
+ * mask corresponds to the i-th endpoint channel.
+ * For example, an endpoint with four channels is represented
+ * as index mask bits 0 through 3.
+ * See {@link #setChannelMask(int)} for a positional mask interpretation.
+ * <p> Both {@link AudioTrack} and {@link AudioRecord} support
+ * a channel index mask.
+ * If a channel index mask is specified it is used,
+ * otherwise the channel position mask specified
+ * by <code>setChannelMask</code> is used.
+ * For <code>AudioTrack</code> and <code>AudioRecord</code>,
+ * a channel position mask is not required if a channel index mask is specified.
+ *
+ * @param channelIndexMask describes the configuration of the audio channels.
+ * <p> For output, the <code>channelIndexMask</code> is an OR-ed combination of
+ * bits representing the mapping of <code>AudioTrack</code> write samples
+ * to output sink channels.
+ * For example, a mask of <code>0xa</code>, or binary <code>1010</code>,
+ * means the <code>AudioTrack</code> write frame consists of two samples,
+ * which are routed to the second and the fourth channels of the output sink.
+ * Unmatched output sink channels are zero filled and unmatched
+ * <code>AudioTrack</code> write samples are dropped.
+ * <p> For input, the <code>channelIndexMask</code> is an OR-ed combination of
+ * bits representing the mapping of input source channels to
+ * <code>AudioRecord</code> read samples.
+ * For example, a mask of <code>0x5</code>, or binary
+ * <code>101</code>, will read from the first and third channel of the input
+ * source device and store them in the first and second sample of the
+ * <code>AudioRecord</code> read frame.
+ * Unmatched input source channels are dropped and
+ * unmatched <code>AudioRecord</code> read samples are zero filled.
+ * @return the same <code>Builder</code> instance.
+ * @throws IllegalArgumentException if the channel index mask is invalid or
+ * if both channel index mask and channel position mask
+ * are specified but do not have the same channel count.
+ */
+ public @NonNull Builder setChannelIndexMask(int channelIndexMask)
+ throws IllegalArgumentException {
+ if (channelIndexMask == 0) {
+ throw new IllegalArgumentException("Invalid zero channel index mask");
+ } else if (/* channelIndexMask != 0 && */ mChannelMask != 0 &&
+ Integer.bitCount(channelIndexMask) != Integer.bitCount(mChannelMask)) {
+ throw new IllegalArgumentException("Mismatched channel count for index mask " +
+ Integer.toHexString(channelIndexMask).toUpperCase());
+ }
+ mChannelIndexMask = channelIndexMask;
+ mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
+ return this;
+ }
+
+ /**
* Sets the sample rate.
* @param sampleRate the sample rate expressed in Hz
* @return the same Builder instance.
@@ -467,7 +590,8 @@ public class AudioFormat {
return new String("AudioFormat:"
+ " props=" + mPropertySetMask
+ " enc=" + mEncoding
- + " chan=0x" + Integer.toHexString(mChannelMask)
+ + " chan=0x" + Integer.toHexString(mChannelMask).toUpperCase()
+ + " chan_index=0x" + Integer.toHexString(mChannelIndexMask).toUpperCase()
+ " rate=" + mSampleRate);
}
@@ -478,7 +602,9 @@ public class AudioFormat {
ENCODING_PCM_16BIT,
ENCODING_PCM_FLOAT,
ENCODING_AC3,
- ENCODING_E_AC3
+ ENCODING_E_AC3,
+ ENCODING_DTS,
+ ENCODING_DTS_HD
})
@Retention(RetentionPolicy.SOURCE)
public @interface Encoding {}
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 365b935..6eaf812 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -38,6 +38,7 @@ import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.os.RemoteException;
+import android.os.SystemProperties;
import android.os.SystemClock;
import android.os.ServiceManager;
import android.provider.Settings;
@@ -56,7 +57,8 @@ import java.util.Iterator;
*/
public class AudioManager {
- private final Context mApplicationContext;
+ private Context mOriginalContext;
+ private Context mApplicationContext;
private long mVolumeKeyUpTime;
private final boolean mUseVolumeKeySounds;
private final boolean mUseFixedVolume;
@@ -64,6 +66,16 @@ public class AudioManager {
private static final AudioPortEventHandler sAudioPortEventHandler = new AudioPortEventHandler();
/**
+ * System properties for whether the default microphone and speaker paths support
+ * near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ private static final String SYSTEM_PROPERTY_MIC_NEAR_ULTRASOUND =
+ "persist.audio.mic.ultrasound";
+ private static final String SYSTEM_PROPERTY_SPEAKER_NEAR_ULTRASOUND =
+ "persist.audio.spkr.ultrasound";
+ private static final String DEFAULT_RESULT_FALSE_STRING = "false";
+
+ /**
* Broadcast intent, a hint for applications that audio is about to become
* 'noisy' due to a change in audio outputs. For example, this intent may
* be sent when a wired headset is unplugged, or when an A2DP audio
@@ -134,6 +146,22 @@ public class AudioManager {
public static final String VOLUME_CHANGED_ACTION = "android.media.VOLUME_CHANGED_ACTION";
/**
+ * @hide Broadcast intent when the devices for a particular stream type changes.
+ * Includes the stream, the new devices and previous devices.
+ * Notes:
+ * - for internal platform use only, do not make public,
+ * - never used for "remote" volume changes
+ *
+ * @see #EXTRA_VOLUME_STREAM_TYPE
+ * @see #EXTRA_VOLUME_STREAM_DEVICES
+ * @see #EXTRA_PREV_VOLUME_STREAM_DEVICES
+ * @see #getDevicesForStream
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String STREAM_DEVICES_CHANGED_ACTION =
+ "android.media.STREAM_DEVICES_CHANGED_ACTION";
+
+ /**
* @hide Broadcast intent when a stream mute state changes.
* Includes the stream that changed and the new mute state
*
@@ -196,6 +224,18 @@ public class AudioManager {
"android.media.EXTRA_PREV_VOLUME_STREAM_VALUE";
/**
+ * @hide The devices associated with the stream for the stream devices changed intent.
+ */
+ public static final String EXTRA_VOLUME_STREAM_DEVICES =
+ "android.media.EXTRA_VOLUME_STREAM_DEVICES";
+
+ /**
+ * @hide The previous devices associated with the stream for the stream devices changed intent.
+ */
+ public static final String EXTRA_PREV_VOLUME_STREAM_DEVICES =
+ "android.media.EXTRA_PREV_VOLUME_STREAM_DEVICES";
+
+ /**
* @hide The new master volume mute state for the master mute changed intent.
* Value is boolean
*/
@@ -582,14 +622,33 @@ public class AudioManager {
* @hide
*/
public AudioManager(Context context) {
- mApplicationContext = context;
- mUseVolumeKeySounds = mApplicationContext.getResources().getBoolean(
+ setContext(context);
+ mUseVolumeKeySounds = getContext().getResources().getBoolean(
com.android.internal.R.bool.config_useVolumeKeySounds);
- mUseFixedVolume = mApplicationContext.getResources().getBoolean(
+ mUseFixedVolume = getContext().getResources().getBoolean(
com.android.internal.R.bool.config_useFixedVolume);
sAudioPortEventHandler.init();
}
+ private Context getContext() {
+ if (mApplicationContext == null) {
+ setContext(mOriginalContext);
+ }
+ if (mApplicationContext != null) {
+ return mApplicationContext;
+ }
+ return mOriginalContext;
+ }
+
+ private void setContext(Context context) {
+ mApplicationContext = context.getApplicationContext();
+ if (mApplicationContext != null) {
+ mOriginalContext = null;
+ } else {
+ mOriginalContext = context;
+ }
+ }
+
private static IAudioService getService()
{
if (sService != null) {
@@ -624,7 +683,7 @@ public class AudioManager {
* or {@link KeyEvent#KEYCODE_MEDIA_AUDIO_TRACK}.
*/
public void dispatchMediaKeyEvent(KeyEvent keyEvent) {
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
helper.sendMediaButtonEvent(keyEvent, false);
}
@@ -670,7 +729,7 @@ public class AudioManager {
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
if (event.getRepeatCount() == 0) {
- MediaSessionLegacyHelper.getHelper(mApplicationContext)
+ MediaSessionLegacyHelper.getHelper(getContext())
.sendVolumeKeyEvent(event, false);
}
break;
@@ -698,7 +757,7 @@ public class AudioManager {
mVolumeKeyUpTime = SystemClock.uptimeMillis();
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
- MediaSessionLegacyHelper.getHelper(mApplicationContext)
+ MediaSessionLegacyHelper.getHelper(getContext())
.sendVolumeKeyEvent(event, false);
break;
}
@@ -744,7 +803,7 @@ public class AudioManager {
IAudioService service = getService();
try {
service.adjustStreamVolume(streamType, direction, flags,
- mApplicationContext.getOpPackageName());
+ getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in adjustStreamVolume", e);
}
@@ -774,7 +833,7 @@ public class AudioManager {
* @see #isVolumeFixed()
*/
public void adjustVolume(int direction, int flags) {
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
helper.sendAdjustVolumeBy(USE_DEFAULT_STREAM_TYPE, direction, flags);
}
@@ -803,7 +862,7 @@ public class AudioManager {
* @see #isVolumeFixed()
*/
public void adjustSuggestedStreamVolume(int direction, int suggestedStreamType, int flags) {
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
helper.sendAdjustVolumeBy(suggestedStreamType, direction, flags);
}
@@ -811,7 +870,7 @@ public class AudioManager {
public void setMasterMute(boolean mute, int flags) {
IAudioService service = getService();
try {
- service.setMasterMute(mute, flags, mApplicationContext.getOpPackageName());
+ service.setMasterMute(mute, flags, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setMasterMute", e);
}
@@ -958,7 +1017,7 @@ public class AudioManager {
}
IAudioService service = getService();
try {
- service.setRingerModeExternal(ringerMode, mApplicationContext.getOpPackageName());
+ service.setRingerModeExternal(ringerMode, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setRingerMode", e);
}
@@ -979,7 +1038,7 @@ public class AudioManager {
public void setStreamVolume(int streamType, int index, int flags) {
IAudioService service = getService();
try {
- service.setStreamVolume(streamType, index, flags, mApplicationContext.getOpPackageName());
+ service.setStreamVolume(streamType, index, flags, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setStreamVolume", e);
}
@@ -1292,7 +1351,7 @@ public class AudioManager {
* @see #startBluetoothSco()
*/
public boolean isBluetoothScoAvailableOffCall() {
- return mApplicationContext.getResources().getBoolean(
+ return getContext().getResources().getBoolean(
com.android.internal.R.bool.config_bluetooth_sco_off_call);
}
@@ -1345,7 +1404,7 @@ public class AudioManager {
IAudioService service = getService();
try {
service.startBluetoothSco(mICallBack,
- mApplicationContext.getApplicationInfo().targetSdkVersion);
+ getContext().getApplicationInfo().targetSdkVersion);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in startBluetoothSco", e);
}
@@ -1493,7 +1552,7 @@ public class AudioManager {
public void setMicrophoneMute(boolean on){
IAudioService service = getService();
try {
- service.setMicrophoneMute(on, mApplicationContext.getOpPackageName());
+ service.setMicrophoneMute(on, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setMicrophoneMute", e);
}
@@ -1924,7 +1983,7 @@ public class AudioManager {
* Settings has an in memory cache, so this is fast.
*/
private boolean querySoundEffectsEnabled(int user) {
- return Settings.System.getIntForUser(mApplicationContext.getContentResolver(),
+ return Settings.System.getIntForUser(getContext().getContentResolver(),
Settings.System.SOUND_EFFECTS_ENABLED, 0, user) != 0;
}
@@ -2336,7 +2395,7 @@ public class AudioManager {
try {
status = service.requestAudioFocus(requestAttributes, durationHint, mICallBack,
mAudioFocusDispatcher, getIdForAudioFocusListener(l),
- mApplicationContext.getOpPackageName() /* package name */, flags,
+ getContext().getOpPackageName() /* package name */, flags,
ap != null ? ap.cb() : null);
} catch (RemoteException e) {
Log.e(TAG, "Can't call requestAudioFocus() on AudioService:", e);
@@ -2361,7 +2420,7 @@ public class AudioManager {
.setInternalLegacyStreamType(streamType).build(),
durationHint, mICallBack, null,
AudioSystem.IN_VOICE_COMM_FOCUS_ID,
- mApplicationContext.getOpPackageName(),
+ getContext().getOpPackageName(),
AUDIOFOCUS_FLAG_LOCK,
null /* policy token */);
} catch (RemoteException e) {
@@ -2430,7 +2489,7 @@ public class AudioManager {
if (eventReceiver == null) {
return;
}
- if (!eventReceiver.getPackageName().equals(mApplicationContext.getPackageName())) {
+ if (!eventReceiver.getPackageName().equals(getContext().getPackageName())) {
Log.e(TAG, "registerMediaButtonEventReceiver() error: " +
"receiver and context package names don't match");
return;
@@ -2439,7 +2498,7 @@ public class AudioManager {
Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
// the associated intent will be handled by the component being registered
mediaButtonIntent.setComponent(eventReceiver);
- PendingIntent pi = PendingIntent.getBroadcast(mApplicationContext,
+ PendingIntent pi = PendingIntent.getBroadcast(getContext(),
0/*requestCode, ignored*/, mediaButtonIntent, 0/*flags*/);
registerMediaButtonIntent(pi, eventReceiver);
}
@@ -2473,8 +2532,8 @@ public class AudioManager {
Log.e(TAG, "Cannot call registerMediaButtonIntent() with a null parameter");
return;
}
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
- helper.addMediaButtonListener(pi, eventReceiver, mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.addMediaButtonListener(pi, eventReceiver, getContext());
}
/**
@@ -2492,7 +2551,7 @@ public class AudioManager {
Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
// the associated intent will be handled by the component being registered
mediaButtonIntent.setComponent(eventReceiver);
- PendingIntent pi = PendingIntent.getBroadcast(mApplicationContext,
+ PendingIntent pi = PendingIntent.getBroadcast(getContext(),
0/*requestCode, ignored*/, mediaButtonIntent, 0/*flags*/);
unregisterMediaButtonIntent(pi);
}
@@ -2515,7 +2574,7 @@ public class AudioManager {
* @hide
*/
public void unregisterMediaButtonIntent(PendingIntent pi) {
- MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(mApplicationContext);
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
helper.removeMediaButtonListener(pi);
}
@@ -2532,7 +2591,7 @@ public class AudioManager {
if ((rcClient == null) || (rcClient.getRcMediaIntent() == null)) {
return;
}
- rcClient.registerWithSession(MediaSessionLegacyHelper.getHelper(mApplicationContext));
+ rcClient.registerWithSession(MediaSessionLegacyHelper.getHelper(getContext()));
}
/**
@@ -2547,7 +2606,7 @@ public class AudioManager {
if ((rcClient == null) || (rcClient.getRcMediaIntent() == null)) {
return;
}
- rcClient.unregisterWithSession(MediaSessionLegacyHelper.getHelper(mApplicationContext));
+ rcClient.unregisterWithSession(MediaSessionLegacyHelper.getHelper(getContext()));
}
/**
@@ -3091,9 +3150,8 @@ public class AudioManager {
delay = service.setBluetoothA2dpDeviceConnectionState(device, state, profile);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setBluetoothA2dpDeviceConnectionState "+e);
- } finally {
- return delay;
}
+ return delay;
}
/** {@hide} */
@@ -3120,6 +3178,20 @@ public class AudioManager {
"android.media.property.OUTPUT_FRAMES_PER_BUFFER";
/**
+ * Used as a key for {@link #getProperty} to determine if the default microphone audio source
+ * supports near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ public static final String PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND =
+ "android.media.property.SUPPORT_MIC_NEAR_ULTRASOUND";
+
+ /**
+ * Used as a key for {@link #getProperty} to determine if the default speaker audio path
+ * supports near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ public static final String PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND =
+ "android.media.property.SUPPORT_SPEAKER_NEAR_ULTRASOUND";
+
+ /**
* Returns the value of the property with the specified key.
* @param key One of the strings corresponding to a property key: either
* {@link #PROPERTY_OUTPUT_SAMPLE_RATE} or
@@ -3134,6 +3206,12 @@ public class AudioManager {
} else if (PROPERTY_OUTPUT_FRAMES_PER_BUFFER.equals(key)) {
int outputFramesPerBuffer = AudioSystem.getPrimaryOutputFrameCount();
return outputFramesPerBuffer > 0 ? Integer.toString(outputFramesPerBuffer) : null;
+ } else if (PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND.equals(key)) {
+ return SystemProperties.get(SYSTEM_PROPERTY_MIC_NEAR_ULTRASOUND,
+ DEFAULT_RESULT_FALSE_STRING);
+ } else if (PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND.equals(key)) {
+ return SystemProperties.get(SYSTEM_PROPERTY_SPEAKER_NEAR_ULTRASOUND,
+ DEFAULT_RESULT_FALSE_STRING);
} else {
// null or unknown key
return null;
@@ -3222,7 +3300,7 @@ public class AudioManager {
*/
public void setRingerModeInternal(int ringerMode) {
try {
- getService().setRingerModeInternal(ringerMode, mApplicationContext.getOpPackageName());
+ getService().setRingerModeInternal(ringerMode, getContext().getOpPackageName());
} catch (RemoteException e) {
Log.w(TAG, "Error calling setRingerModeInternal", e);
}
diff --git a/media/java/android/media/AudioPortEventHandler.java b/media/java/android/media/AudioPortEventHandler.java
index c05fd77..c49e8c2 100644
--- a/media/java/android/media/AudioPortEventHandler.java
+++ b/media/java/android/media/AudioPortEventHandler.java
@@ -40,6 +40,12 @@ class AudioPortEventHandler {
private static final int AUDIOPORT_EVENT_SERVICE_DIED = 3;
private static final int AUDIOPORT_EVENT_NEW_LISTENER = 4;
+ /**
+ * Accessed by native methods: JNI Callback context.
+ */
+ @SuppressWarnings("unused")
+ private long mJniCallback;
+
void init() {
synchronized (this) {
if (mHandler != null) {
@@ -63,9 +69,6 @@ class AudioPortEventHandler {
listeners = mListeners;
}
}
- if (listeners.isEmpty()) {
- return;
- }
// reset audio port cache if the event corresponds to a change coming
// from audio policy service or if mediaserver process died.
if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED ||
@@ -73,6 +76,11 @@ class AudioPortEventHandler {
msg.what == AUDIOPORT_EVENT_SERVICE_DIED) {
AudioManager.resetAudioPortGeneration();
}
+
+ if (listeners.isEmpty()) {
+ return;
+ }
+
ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
ArrayList<AudioPatch> patches = new ArrayList<AudioPatch>();
if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index 259fe37..201a796 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -16,10 +16,15 @@
package android.media;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
+import java.util.Collection;
import java.util.Iterator;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
import android.annotation.SystemApi;
import android.os.Binder;
import android.os.Handler;
@@ -28,6 +33,7 @@ import android.os.Looper;
import android.os.Message;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.util.ArrayMap;
import android.util.Log;
/**
@@ -49,6 +55,12 @@ public class AudioRecord
//---------------------------------------------------------
// Constants
//--------------------
+
+ /** Minimum value for sample rate */
+ private static final int SAMPLE_RATE_HZ_MIN = 4000;
+ /** Maximum value for sample rate */
+ private static final int SAMPLE_RATE_HZ_MAX = 192000;
+
/**
* indicates AudioRecord state is not successfully initialized.
*/
@@ -103,11 +115,36 @@ public class AudioRecord
*/
private static final int NATIVE_EVENT_NEW_POS = 3;
+ /**
+ * Event id denotes when the routing changes.
+ */
+ private final static int NATIVE_EVENT_ROUTING_CHANGE = 1000;
+
private final static String TAG = "android.media.AudioRecord";
/** @hide */
public final static String SUBMIX_FIXED_VOLUME = "fixedVolume";
+ /** @hide */
+ @IntDef({
+ READ_BLOCKING,
+ READ_NON_BLOCKING
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ReadMode {}
+
+ /**
+ * The read mode indicating the read operation will block until all data
+ * requested has been read.
+ */
+ public final static int READ_BLOCKING = 0;
+
+ /**
+ * The read mode indicating the read operation will return immediately after
+ * reading as much audio data as possible without blocking.
+ */
+ public final static int READ_NON_BLOCKING = 1;
+
//---------------------------------------------------------
// Used exclusively by native code
//--------------------
@@ -136,13 +173,18 @@ public class AudioRecord
*/
private int mChannelCount;
/**
- * The audio channel mask
+ * The audio channel position mask
*/
private int mChannelMask;
/**
+ * The audio channel index mask
+ */
+ private int mChannelIndexMask;
+ /**
* The encoding of the audio samples.
* @see AudioFormat#ENCODING_PCM_8BIT
* @see AudioFormat#ENCODING_PCM_16BIT
+ * @see AudioFormat#ENCODING_PCM_FLOAT
*/
private int mAudioFormat;
/**
@@ -211,9 +253,9 @@ public class AudioRecord
* See {@link AudioFormat#CHANNEL_IN_MONO} and
* {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed
* to work on all devices.
- * @param audioFormat the format in which the audio data is represented.
- * See {@link AudioFormat#ENCODING_PCM_16BIT} and
- * {@link AudioFormat#ENCODING_PCM_8BIT}
+ * @param audioFormat the format in which the audio data is to be returned.
+ * See {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT},
+ * and {@link AudioFormat#ENCODING_PCM_FLOAT}.
* @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
* to during the recording. New audio data can be read from this buffer in smaller chunks
* than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
@@ -313,8 +355,19 @@ public class AudioRecord
audioParamCheck(attributes.getCapturePreset(), rate, encoding);
- mChannelCount = AudioFormat.channelCountFromInChannelMask(format.getChannelMask());
- mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
+ mChannelIndexMask = format.getChannelIndexMask();
+ mChannelCount = format.getChannelCount();
+ }
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
+ mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
+ mChannelCount = format.getChannelCount();
+ } else if (mChannelIndexMask == 0) {
+ mChannelMask = getChannelMaskFromLegacyConfig(AudioFormat.CHANNEL_IN_DEFAULT, false);
+ mChannelCount = AudioFormat.channelCountFromInChannelMask(mChannelMask);
+ }
audioBuffSizeCheck(bufferSizeInBytes);
@@ -323,7 +376,8 @@ public class AudioRecord
//TODO: update native initialization when information about hardware init failure
// due to capture device already open is available.
int initResult = native_setup( new WeakReference<AudioRecord>(this),
- mAudioAttributes, mSampleRate, mChannelMask, mAudioFormat, mNativeBufferSizeInBytes,
+ mAudioAttributes, mSampleRate, mChannelMask, mChannelIndexMask,
+ mAudioFormat, mNativeBufferSizeInBytes,
session);
if (initResult != SUCCESS) {
loge("Error code "+initResult+" when initializing native AudioRecord object.");
@@ -335,6 +389,170 @@ public class AudioRecord
mState = STATE_INITIALIZED;
}
+ /**
+ * Builder class for {@link AudioRecord} objects.
+ * Use this class to configure and create an <code>AudioRecord</code> instance. By setting the
+ * recording preset (a.k.a. recording source) and audio format parameters, you indicate which of
+ * those vary from the default behavior on the device.
+ * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
+ * parameters, to be used by a new <code>AudioRecord</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioRecord recorder = new AudioRecord.Builder()
+ * .setCapturePreset(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
+ * .setAudioFormat(new AudioFormat.Builder()
+ * .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ * .setSampleRate(32000)
+ * .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+ * .build())
+ * .setBufferSize(2*minBuffSize)
+ * .build();
+ * </pre>
+ * <p>
+ * If the capture preset is not set with {@link #setCapturePreset(int)},
+ * {@link MediaRecorder.AudioSource#DEFAULT} is used.
+ * <br>If the audio format is not specified or is incomplete, its sample rate will be the
+ * default output sample rate of the device (see
+ * {@link AudioManager#PROPERTY_OUTPUT_SAMPLE_RATE}), its channel configuration will be
+ * {@link AudioFormat#CHANNEL_IN_DEFAULT}.
+ * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
+ * the minimum buffer size for the source is used.
+ */
+ public static class Builder {
+ private AudioAttributes mAttributes;
+ private AudioFormat mFormat;
+ private int mBufferSizeInBytes;
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+
+ /**
+ * Constructs a new Builder with the default values as described above.
+ */
+ public Builder() {
+ }
+
+ /**
+ * @param preset the capture preset (also referred to as the recording source).
+ * See {@link MediaRecorder.AudioSource} for the supported capture preset definitions.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setCapturePreset(int preset) throws IllegalArgumentException {
+ if ( (preset < MediaRecorder.AudioSource.DEFAULT) ||
+ (preset > MediaRecorder.getAudioSourceMax()) ) {
+ throw new IllegalArgumentException("Invalid audio source " + preset);
+ }
+ mAttributes = new AudioAttributes.Builder()
+ .setInternalCapturePreset(preset)
+ .build();
+ return this;
+ }
+
+ /**
+ * @hide
+ * To be only used by system components. Allows specifying non-public capture presets
+ * @param attributes a non-null {@link AudioAttributes} instance that contains the capture
+ * preset to be used.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setAudioAttributes(@NonNull AudioAttributes attributes)
+ throws IllegalArgumentException {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+ }
+ if (attributes.getCapturePreset() == MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID) {
+ throw new IllegalArgumentException(
+ "No valid capture preset in AudioAttributes argument");
+ }
+ // keep reference, we only copy the data when building
+ mAttributes = attributes;
+ return this;
+ }
+
+ /**
+ * Sets the format of the audio data to be captured.
+ * @param format a non-null {@link AudioFormat} instance
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setAudioFormat(@NonNull AudioFormat format) throws IllegalArgumentException {
+ if (format == null) {
+ throw new IllegalArgumentException("Illegal null AudioFormat argument");
+ }
+ // keep reference, we only copy the data when building
+ mFormat = format;
+ return this;
+ }
+
+ /**
+ * Sets the total size (in bytes) of the buffer where audio data is written
+ * during the recording. New audio data can be read from this buffer in smaller chunks
+ * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
+ * required buffer size for the successful creation of an AudioRecord instance.
+ * Since bufferSizeInBytes may be internally increased to accommodate the source
+ * requirements, use {@link #getNativeFrameCount()} to determine the actual buffer size
+ * in frames.
+ * @param bufferSizeInBytes a value strictly greater than 0
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public Builder setBufferSizeInBytes(int bufferSizeInBytes) throws IllegalArgumentException {
+ if (bufferSizeInBytes <= 0) {
+ throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
+ }
+ mBufferSizeInBytes = bufferSizeInBytes;
+ return this;
+ }
+
+ /**
+ * @hide
+ * To be only used by system components.
+ * @param sessionId ID of audio session the AudioRecord must be attached to, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at
+ * construction time.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setSessionId(int sessionId) throws IllegalArgumentException {
+ if (sessionId < 0) {
+ throw new IllegalArgumentException("Invalid session ID " + sessionId);
+ }
+ mSessionId = sessionId;
+ return this;
+ }
+
+ /**
+ * @return a new {@link AudioRecord} instance initialized with all the parameters set
+ * on this <code>Builder</code>
+ * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
+ * were incompatible, or if they are not supported by the device.
+ */
+ public AudioRecord build() throws UnsupportedOperationException {
+ if (mFormat == null) {
+ mFormat = new AudioFormat.Builder().build();
+ }
+ if (mAttributes == null) {
+ mAttributes = new AudioAttributes.Builder()
+ .setInternalCapturePreset(MediaRecorder.AudioSource.DEFAULT)
+ .build();
+ }
+ try {
+ // If the buffer size is not specified,
+ // use a single frame for the buffer size and let the
+ // native code figure out the minimum buffer size.
+ if (mBufferSizeInBytes == 0) {
+ mBufferSizeInBytes = mFormat.getChannelCount()
+ * mFormat.getBytesPerSample(mFormat.getEncoding());
+ }
+ return new AudioRecord(mAttributes, mFormat, mBufferSizeInBytes, mSessionId);
+ } catch (IllegalArgumentException e) {
+ throw new UnsupportedOperationException(e.getMessage());
+ }
+ }
+ }
+
// Convenience method for the constructor's parameter checks.
// This, getChannelMaskFromLegacyConfig and audioBuffSizeCheck are where constructor
// IllegalArgumentException-s are thrown
@@ -385,7 +603,7 @@ public class AudioRecord
//--------------
// sample rate
- if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) {
+ if ((sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX)) {
throw new IllegalArgumentException(sampleRateInHz
+ "Hz is not a supported sample rate.");
}
@@ -397,13 +615,14 @@ public class AudioRecord
case AudioFormat.ENCODING_DEFAULT:
mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
break;
+ case AudioFormat.ENCODING_PCM_FLOAT:
case AudioFormat.ENCODING_PCM_16BIT:
case AudioFormat.ENCODING_PCM_8BIT:
mAudioFormat = audioFormat;
break;
default:
throw new IllegalArgumentException("Unsupported sample encoding."
- + " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.");
+ + " Should be ENCODING_PCM_8BIT, ENCODING_PCM_16BIT, or ENCODING_PCM_FLOAT.");
}
}
@@ -411,7 +630,8 @@ public class AudioRecord
// Convenience method for the constructor's audio buffer size check.
// preconditions:
// mChannelCount is valid
- // mAudioFormat is AudioFormat.ENCODING_PCM_8BIT OR AudioFormat.ENCODING_PCM_16BIT
+ // mAudioFormat is AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT,
+ // or AudioFormat.ENCODING_PCM_FLOAT
// postcondition:
// mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
private void audioBuffSizeCheck(int audioBufferSize) throws IllegalArgumentException {
@@ -470,23 +690,45 @@ public class AudioRecord
}
/**
- * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
- * and {@link AudioFormat#ENCODING_PCM_8BIT}.
+ * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
+ * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
*/
public int getAudioFormat() {
return mAudioFormat;
}
/**
- * Returns the configured channel configuration.
- * See {@link AudioFormat#CHANNEL_IN_MONO}
+ * Returns the configured channel position mask.
+ * <p> See {@link AudioFormat#CHANNEL_IN_MONO}
* and {@link AudioFormat#CHANNEL_IN_STEREO}.
+ * This method may return {@link AudioFormat#CHANNEL_INVALID} if
+ * a channel index mask is used.
+ * Consider {@link #getFormat()} instead, to obtain an {@link AudioFormat},
+ * which contains both the channel position mask and the channel index mask.
*/
public int getChannelConfiguration() {
return mChannelMask;
}
/**
+ * Returns the configured <code>AudioRecord</code> format.
+ * @return an {@link AudioFormat} containing the
+ * <code>AudioRecord</code> parameters at the time of configuration.
+ */
+ public @NonNull AudioFormat getFormat() {
+ AudioFormat.Builder builder = new AudioFormat.Builder()
+ .setSampleRate(mSampleRate)
+ .setEncoding(mAudioFormat);
+ if (mChannelMask != AudioFormat.CHANNEL_INVALID) {
+ builder.setChannelMask(mChannelMask);
+ }
+ if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
+ builder.setChannelIndexMask(mChannelIndexMask);
+ }
+ return builder.build();
+ }
+
+ /**
* Returns the configured number of channels.
*/
public int getChannelCount() {
@@ -517,6 +759,20 @@ public class AudioRecord
}
/**
+ * Returns the "native frame count" of the <code>AudioRecord</code> buffer.
+ * This is greater than or equal to the bufferSizeInBytes converted to frame units
+ * specified in the <code>AudioRecord</code> constructor or Builder.
+ * The native frame count may be enlarged to accommodate the requirements of the
+ * source on creation or if the <code>AudioRecord</code>
+ * is subsequently rerouted.
+ * @return current size in frames of the <code>AudioRecord</code> buffer.
+ * @throws IllegalStateException
+ */
+ public int getNativeFrameCount() throws IllegalStateException {
+ return native_get_native_frame_count();
+ }
+
+ /**
* Returns the notification marker position expressed in frames.
*/
public int getNotificationMarkerPosition() {
@@ -570,12 +826,6 @@ public class AudioRecord
return ERROR_BAD_VALUE;
}
- // PCM_8BIT is not supported at the moment
- if (audioFormat != AudioFormat.ENCODING_PCM_16BIT) {
- loge("getMinBufferSize(): Invalid audio format.");
- return ERROR_BAD_VALUE;
- }
-
int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
if (size == 0) {
return ERROR_BAD_VALUE;
@@ -679,80 +929,217 @@ public class AudioRecord
// Audio data supply
//--------------------
/**
- * Reads audio data from the audio hardware for recording into a buffer.
+ * Reads audio data from the audio hardware for recording into a byte array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
* @param audioData the array to which the recorded audio data is written.
* @param offsetInBytes index in audioData from which the data is written expressed in bytes.
* @param sizeInBytes the number of requested bytes.
- * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of bytes will not exceed sizeInBytes.
*/
- public int read(byte[] audioData, int offsetInBytes, int sizeInBytes) {
- if (mState != STATE_INITIALIZED) {
+ public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
+ return read(audioData, offsetInBytes, sizeInBytes, READ_BLOCKING);
+ }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a byte array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInBytes index in audioData from which the data is written expressed in bytes.
+ * @param sizeInBytes the number of requested bytes.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of bytes will not exceed sizeInBytes.
+ */
+ public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
+ @ReadMode int readMode) {
+ if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
|| (offsetInBytes + sizeInBytes < 0) // detect integer overflow
|| (offsetInBytes + sizeInBytes > audioData.length)) {
return ERROR_BAD_VALUE;
}
- return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes);
+ return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes,
+ readMode == READ_BLOCKING);
}
+ /**
+ * Reads audio data from the audio hardware for recording into a short array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInShorts index in audioData from which the data is written expressed in shorts.
+ * @param sizeInShorts the number of requested shorts.
+ * @return the number of shorts that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of shorts will not exceed sizeInShorts.
+ */
+ public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
+ return read(audioData, offsetInShorts, sizeInShorts, READ_BLOCKING);
+ }
/**
- * Reads audio data from the audio hardware for recording into a buffer.
+ * Reads audio data from the audio hardware for recording into a short array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
* @param audioData the array to which the recorded audio data is written.
* @param offsetInShorts index in audioData from which the data is written expressed in shorts.
* @param sizeInShorts the number of requested shorts.
- * @return the number of shorts that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of shorts that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of shorts will not exceed sizeInShorts.
*/
- public int read(short[] audioData, int offsetInShorts, int sizeInShorts) {
- if (mState != STATE_INITIALIZED) {
+ public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
+ @ReadMode int readMode) {
+ if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
|| (offsetInShorts + sizeInShorts < 0) // detect integer overflow
|| (offsetInShorts + sizeInShorts > audioData.length)) {
return ERROR_BAD_VALUE;
}
- return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts);
+ return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts,
+ readMode == READ_BLOCKING);
}
+ /**
+ * Reads audio data from the audio hardware for recording into a float array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInFloats index in audioData from which the data is written.
+ * @param sizeInFloats the number of requested floats.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of floats that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of floats will not exceed sizeInFloats.
+ */
+ public int read(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
+ @ReadMode int readMode) {
+ if (mState == STATE_UNINITIALIZED) {
+ Log.e(TAG, "AudioRecord.read() called in invalid state STATE_UNINITIALIZED");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
+ Log.e(TAG, "AudioRecord.read(float[] ...) requires format ENCODING_PCM_FLOAT");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
+ if ((audioData == null) || (offsetInFloats < 0) || (sizeInFloats < 0)
+ || (offsetInFloats + sizeInFloats < 0) // detect integer overflow
+ || (offsetInFloats + sizeInFloats > audioData.length)) {
+ return ERROR_BAD_VALUE;
+ }
+
+ return native_read_in_float_array(audioData, offsetInFloats, sizeInFloats,
+ readMode == READ_BLOCKING);
+ }
/**
* Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
* is not a direct buffer, this method will always return 0.
* Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
* unchanged after a call to this method.
+ * The representation of the data in the buffer will depend on the format specified in
+ * the AudioRecord constructor, and will be native endian.
* @param audioBuffer the direct buffer to which the recorded audio data is written.
- * @param sizeInBytes the number of requested bytes.
- * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of bytes will not exceed sizeInBytes.
+ * The number of bytes read will be truncated to be a multiple of the frame size.
*/
- public int read(ByteBuffer audioBuffer, int sizeInBytes) {
+ public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes) {
+ return read(audioBuffer, sizeInBytes, READ_BLOCKING);
+ }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
+ * is not a direct buffer, this method will always return 0.
+ * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
+ * unchanged after a call to this method.
+ * The representation of the data in the buffer will depend on the format specified in
+ * the AudioRecord constructor, and will be native endian.
+ * @param audioBuffer the direct buffer to which the recorded audio data is written.
+ * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return the number of bytes that were read or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of bytes will not exceed sizeInBytes.
+ * The number of bytes read will be truncated to be a multiple of the frame size.
+ */
+ public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes, @ReadMode int readMode) {
if (mState != STATE_INITIALIZED) {
return ERROR_INVALID_OPERATION;
}
+ if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+ Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioBuffer == null) || (sizeInBytes < 0) ) {
return ERROR_BAD_VALUE;
}
- return native_read_in_direct_buffer(audioBuffer, sizeInBytes);
+ return native_read_in_direct_buffer(audioBuffer, sizeInBytes, readMode == READ_BLOCKING);
}
-
//--------------------------------------------------------------------------
// Initialization / configuration
//--------------------
@@ -769,7 +1156,7 @@ public class AudioRecord
* Sets the listener the AudioRecord notifies when a previously set marker is reached or
* for each periodic record head position update.
* Use this method to receive AudioRecord events in the Handler associated with another
- * thread than the one in which you created the AudioTrack instance.
+ * thread than the one in which you created the AudioRecord instance.
* @param listener
* @param handler the Handler that will receive the event notification messages.
*/
@@ -810,6 +1197,115 @@ public class AudioRecord
}
+ //--------------------------------------------------------------------------
+ // (Re)Routing Info
+ //--------------------
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioRecord.
+ */
+ public AudioDeviceInfo getRoutedDevice() {
+ return null;
+ }
+
+ /**
+ * Map of registered routing-change listeners to the delegates that forward the
+ * native routing events to them, optionally on the {@link Handler} supplied to
+ * addOnAudioRecordRoutingListener().
+ */
+ private ArrayMap<OnAudioRecordRoutingListener, NativeRoutingEventHandlerDelegate>
+ mRoutingChangeListeners =
+ new ArrayMap<OnAudioRecordRoutingListener, NativeRoutingEventHandlerDelegate>();
+
+ /**
+ * Adds an {@link OnAudioRecordRoutingListener} to receive notifications of routing changes
+ * on this AudioRecord.
+ */
+ public void addOnAudioRecordRoutingListener(OnAudioRecordRoutingListener listener,
+         android.os.Handler handler) {
+     // The check-and-put must happen atomically under the lock; checking
+     // containsKey() outside the synchronized block (as before) raced with
+     // concurrent registrations of the same listener.
+     synchronized (mRoutingChangeListeners) {
+         if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+             mRoutingChangeListeners.put(
+                     listener, new NativeRoutingEventHandlerDelegate(this, listener, handler));
+         }
+     }
+ }
+
+ /**
+ * Removes an {@link OnAudioRecordRoutingListener} which has been previously added
+ * to receive notifications of changes to the set of connected audio devices.
+ */
+ public void removeOnAudioRecordRoutingListener(OnAudioRecordRoutingListener listener) {
+     synchronized (mRoutingChangeListeners) {
+         // Map.remove() is a no-op when the key is absent, so the previous
+         // containsKey() pre-check was redundant.
+         mRoutingChangeListeners.remove(listener);
+     }
+ }
+
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread
+ */
+ private class NativeRoutingEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativeRoutingEventHandlerDelegate(final AudioRecord record,
+ final OnAudioRecordRoutingListener listener,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no given handler, use the looper the AudioRecord was created in
+ looper = mInitializationLooper;
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ if (record == null) {
+ return;
+ }
+ switch(msg.what) {
+ case NATIVE_EVENT_ROUTING_CHANGE:
+ if (listener != null) {
+ listener.onAudioRecordRouting(record);
+ }
+ break;
+ default:
+ loge("Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
+ }
+ }
+ /**
+ * Sends a routing-change notification to all registered listeners.
+ */
+ private void broadcastRoutingChange() {
+     // Iterate under the lock: ArrayMap.values() returns a live view of the
+     // map, so walking the previously captured view outside the synchronized
+     // block raced with concurrent listener add/remove. Posting an empty
+     // message per delegate is cheap, so holding the lock here is acceptable.
+     synchronized (mRoutingChangeListeners) {
+         for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
+             Handler handler = delegate.getHandler();
+             if (handler != null) {
+                 handler.sendEmptyMessage(NATIVE_EVENT_ROUTING_CHANGE);
+             }
+         }
+     }
+ }
+
/**
* Sets the period at which the listener is called, if set with
* {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
@@ -826,6 +1322,39 @@ public class AudioRecord
}
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
+ private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the input to this AudioRecord.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio source.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio input device.
+ */
+ public boolean setPreferredInputDevice(AudioDeviceInfo deviceInfo) {
+     // A preferred device must be capable of capture (a source); null clears
+     // the preference and restores default routing.
+     if (deviceInfo != null && !deviceInfo.isSource()) {
+         return false;
+     }
+     int preferredDeviceId = (deviceInfo != null) ? deviceInfo.getId() : 0;
+     boolean status = native_setInputDevice(preferredDeviceId);
+     if (status) {
+         // Record the preference only if the native layer accepted it, so that
+         // getPreferredInputDevice() never reports a device that routing rejected.
+         mPreferredDevice = deviceInfo;
+     }
+     return status;
+ }
+
+ /**
+ * Returns the selected input specified by {@link #setPreferredInputDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for recording.
+ */
+ public AudioDeviceInfo getPreferredInputDevice() {
+ return mPreferredDevice;
+ }
+
//---------------------------------------------------------
// Interface definitions
//--------------------
@@ -921,7 +1450,7 @@ public class AudioRecord
private native final int native_setup(Object audiorecord_this,
Object /*AudioAttributes*/ attributes,
- int sampleRate, int channelMask, int audioFormat,
+ int sampleRate, int channelMask, int channelIndexMask, int audioFormat,
int buffSizeInBytes, int[] sessionId);
// TODO remove: implementation calls directly into implementation of native_release()
@@ -934,12 +1463,18 @@ public class AudioRecord
private native final void native_stop();
private native final int native_read_in_byte_array(byte[] audioData,
- int offsetInBytes, int sizeInBytes);
+ int offsetInBytes, int sizeInBytes, boolean isBlocking);
private native final int native_read_in_short_array(short[] audioData,
- int offsetInShorts, int sizeInShorts);
+ int offsetInShorts, int sizeInShorts, boolean isBlocking);
- private native final int native_read_in_direct_buffer(Object jBuffer, int sizeInBytes);
+ private native final int native_read_in_float_array(float[] audioData,
+ int offsetInFloats, int sizeInFloats, boolean isBlocking);
+
+ private native final int native_read_in_direct_buffer(Object jBuffer,
+ int sizeInBytes, boolean isBlocking);
+
+ private native final int native_get_native_frame_count();
private native final int native_set_marker_pos(int marker);
private native final int native_get_marker_pos();
@@ -950,6 +1485,8 @@ public class AudioRecord
static private native final int native_get_min_buff_size(
int sampleRateInHz, int channelCount, int audioFormat);
+ private native final boolean native_setInputDevice(int deviceId);
+
//---------------------------------------------------------
// Utility methods
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 787320e..25e6594 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -123,7 +123,7 @@ public class AudioSystem
/** @deprecated */
@Deprecated public static final int ROUTE_ALL = 0xFFFFFFFF;
- // Keep in sync with system/core/include/system/audio.h
+ // Keep in sync with system/media/audio/include/system/audio.h
public static final int AUDIO_SESSION_ALLOCATE = 0;
/*
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 93e2cbe..6f1fd24 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -19,9 +19,15 @@ package android.media;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
+import java.lang.Math;
import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
import java.nio.NioUtils;
+import java.util.Collection;
+
import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
import android.app.ActivityThread;
import android.app.AppOpsManager;
import android.content.Context;
@@ -32,6 +38,7 @@ import android.os.Message;
import android.os.Process;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.util.ArrayMap;
import android.util.Log;
import com.android.internal.app.IAppOpsService;
@@ -89,7 +96,7 @@ public class AudioTrack
/** Minimum value for sample rate */
private static final int SAMPLE_RATE_HZ_MIN = 4000;
/** Maximum value for sample rate */
- private static final int SAMPLE_RATE_HZ_MAX = 96000;
+ private static final int SAMPLE_RATE_HZ_MAX = 192000;
/** Maximum value for AudioTrack channel count */
private static final int CHANNEL_COUNT_MAX = 8;
@@ -113,6 +120,14 @@ public class AudioTrack
*/
public static final int MODE_STREAM = 1;
+ /** @hide */
+ @IntDef({
+ MODE_STATIC,
+ MODE_STREAM
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface TransferMode {}
+
/**
* State of an AudioTrack that was not successfully initialized upon creation.
*/
@@ -163,6 +178,12 @@ public class AudioTrack
*/
private static final int NATIVE_EVENT_NEW_POS = 4;
+ /**
+ * Event id denotes when the routing changes.
+ */
+ private final static int NATIVE_EVENT_ROUTING_CHANGE = 1000;
+
+
private final static String TAG = "android.media.AudioTrack";
@@ -203,13 +224,15 @@ public class AudioTrack
private final Object mPlayStateLock = new Object();
/**
* Sizes of the native audio buffer.
+ * These values are set during construction and can be stale.
+ * To obtain the current native audio buffer frame count use {@link #getNativeFrameCount()}.
*/
private int mNativeBufferSizeInBytes = 0;
private int mNativeBufferSizeInFrames = 0;
/**
* Handler for events coming from the native code.
*/
- private NativeEventHandlerDelegate mEventHandlerDelegate;
+ private NativePositionEventHandlerDelegate mEventHandlerDelegate;
/**
* Looper associated with the thread that creates the AudioTrack instance.
*/
@@ -223,7 +246,7 @@ public class AudioTrack
*/
private int mChannelCount = 1;
/**
- * The audio channel mask.
+ * The audio channel mask used for calling native AudioTrack
*/
private int mChannels = AudioFormat.CHANNEL_OUT_MONO;
@@ -242,10 +265,16 @@ public class AudioTrack
*/
private int mDataLoadMode = MODE_STREAM;
/**
- * The current audio channel configuration.
+ * The current channel position mask, as specified on AudioTrack creation.
+ * Can be set simultaneously with channel index mask {@link #mChannelIndexMask}.
+ * May be set to {@link AudioFormat#CHANNEL_INVALID} if a channel index mask is specified.
*/
private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
/**
+ * The current audio channel index configuration (if specified).
+ */
+ private int mChannelIndexMask = 0;
+ /**
* The encoding of the audio samples.
* @see AudioFormat#ENCODING_PCM_8BIT
* @see AudioFormat#ENCODING_PCM_16BIT
@@ -260,6 +289,14 @@ public class AudioTrack
* Reference to the app-ops service.
*/
private final IAppOpsService mAppOps;
+ /**
+ * HW_AV_SYNC track AV Sync Header
+ */
+ private ByteBuffer mAvSyncHeader = null;
+ /**
+ * HW_AV_SYNC track audio data bytes remaining to write after current AV sync header
+ */
+ private int mAvSyncBytesRemaining = 0;
//--------------------------------
// Used exclusively by native code
@@ -295,15 +332,20 @@ public class AudioTrack
* {@link AudioFormat#ENCODING_PCM_8BIT},
* and {@link AudioFormat#ENCODING_PCM_FLOAT}.
* @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
- * read from for playback.
- * If track's creation mode is {@link #MODE_STREAM}, you can write data into
- * this buffer in chunks less than or equal to this size, and it is typical to use
- * chunks of 1/2 of the total size to permit double-buffering.
- * If the track's creation mode is {@link #MODE_STATIC},
+ * read from for playback. This should be a multiple of the frame size in bytes.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
* this is the maximum length sample, or audio clip, that can be played by this instance.
- * See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
- * for the successful creation of an AudioTrack instance in streaming mode. Using values
- * smaller than getMinBufferSize() will result in an initialization failure.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * this should be the desired buffer size
+ * for the <code>AudioTrack</code> to satisfy the application's
+ * natural latency requirements.
+ * If <code>bufferSizeInBytes</code> is less than the
+ * minimum buffer size for the output sink, it is automatically increased to the minimum
+ * buffer size.
+ * The method {@link #getNativeFrameCount()} returns the
+ * actual size in frames of the native buffer created, which
+ * determines the frequency to write
+ * to the streaming <code>AudioTrack</code> to avoid underrun.
* @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
* @throws java.lang.IllegalArgumentException
*/
@@ -413,16 +455,24 @@ public class AudioTrack
rate = 44100;
}
}
- int channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
- if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0)
- {
+ int channelIndexMask = 0;
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
+ channelIndexMask = format.getChannelIndexMask();
+ }
+ int channelMask = 0;
+ if ((format.getPropertySetMask()
+ & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
channelMask = format.getChannelMask();
+ } else if (channelIndexMask == 0) { // if no masks at all, use stereo
+ channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT
+ | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
}
int encoding = AudioFormat.ENCODING_DEFAULT;
if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0) {
encoding = format.getEncoding();
}
- audioParamCheck(rate, channelMask, encoding, mode);
+ audioParamCheck(rate, channelMask, channelIndexMask, encoding, mode);
mStreamType = AudioSystem.STREAM_DEFAULT;
audioBuffSizeCheck(bufferSizeInBytes);
@@ -441,7 +491,7 @@ public class AudioTrack
session[0] = sessionId;
// native initialization
int initResult = native_setup(new WeakReference<AudioTrack>(this), mAttributes,
- mSampleRate, mChannels, mAudioFormat,
+ mSampleRate, mChannels, mChannelIndexMask, mAudioFormat,
mNativeBufferSizeInBytes, mDataLoadMode, session);
if (initResult != SUCCESS) {
loge("Error code "+initResult+" when initializing AudioTrack.");
@@ -457,6 +507,188 @@ public class AudioTrack
}
}
+ /**
+ * Builder class for {@link AudioTrack} objects.
+ * Use this class to configure and create an <code>AudioTrack</code> instance. By setting audio
+ * attributes and audio format parameters, you indicate which of those vary from the default
+ * behavior on the device.
+ * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
+ * parameters, to be used by a new <code>AudioTrack</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioTrack player = new AudioTrack.Builder()
+ * .setAudioAttributes(new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_ALARM)
+ * .setContentType(CONTENT_TYPE_MUSIC)
+ * .build())
+ * .setAudioFormat(new AudioFormat.Builder()
+ * .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ * .setSampleRate(441000)
+ * .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
+ * .build())
+ * .setBufferSize(minBuffSize)
+ * .build();
+ * </pre>
+ * <p>
+ * If the audio attributes are not set with {@link #setAudioAttributes(AudioAttributes)},
+ * attributes comprising {@link AudioAttributes#USAGE_MEDIA} will be used.
+ * <br>If the audio format is not specified or is incomplete, its sample rate will be the
+ * default output sample rate of the device (see
+ * {@link AudioManager#PROPERTY_OUTPUT_SAMPLE_RATE}), its channel configuration will be
+ * {@link AudioFormat#CHANNEL_OUT_STEREO} and the encoding will be
+ * {@link AudioFormat#ENCODING_PCM_16BIT}.
+ * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
+ * and the mode is {@link AudioTrack#MODE_STREAM}, the minimum buffer size is used.
+ * <br>If the transfer mode is not specified with {@link #setTransferMode(int)},
+ * <code>MODE_STREAM</code> will be used.
+ * <br>If the session ID is not specified with {@link #setSessionId(int)}, a new one will
+ * be generated.
+ */
+ public static class Builder {
+ private AudioAttributes mAttributes;
+ private AudioFormat mFormat;
+ private int mBufferSizeInBytes;
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+ private int mMode = MODE_STREAM;
+
+ /**
+ * Constructs a new Builder with the default values as described above.
+ */
+ public Builder() {
+ }
+
+ /**
+ * Sets the {@link AudioAttributes}.
+ * @param attributes a non-null {@link AudioAttributes} instance that describes the audio
+ * data to be played.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setAudioAttributes(@NonNull AudioAttributes attributes)
+ throws IllegalArgumentException {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+ }
+ // keep reference, we only copy the data when building
+ mAttributes = attributes;
+ return this;
+ }
+
+ /**
+ * Sets the format of the audio data to be played by the {@link AudioTrack}.
+ * See {@link AudioFormat.Builder} for configuring the audio format parameters such
+ * as encoding, channel mask and sample rate.
+ * @param format a non-null {@link AudioFormat} instance.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setAudioFormat(@NonNull AudioFormat format)
+ throws IllegalArgumentException {
+ if (format == null) {
+ throw new IllegalArgumentException("Illegal null AudioFormat argument");
+ }
+ // keep reference, we only copy the data when building
+ mFormat = format;
+ return this;
+ }
+
+ /**
+ * Sets the total size (in bytes) of the buffer where audio data is read from for playback.
+ * If using the {@link AudioTrack} in streaming mode
+ * (see {@link AudioTrack#MODE_STREAM}, you can write data into this buffer in smaller
+ * chunks than this size. See {@link #getMinBufferSize(int, int, int)} to determine
+ * the minimum required buffer size for the successful creation of an AudioTrack instance
+ * in streaming mode. Using values smaller than <code>getMinBufferSize()</code> will result
+ * in an exception when trying to build the <code>AudioTrack</code>.
+ * <br>If using the <code>AudioTrack</code> in static mode (see
+ * {@link AudioTrack#MODE_STATIC}), this is the maximum size of the sound that will be
+ * played by this instance.
+ * @param bufferSizeInBytes
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setBufferSizeInBytes(int bufferSizeInBytes)
+ throws IllegalArgumentException {
+ if (bufferSizeInBytes <= 0) {
+ throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
+ }
+ mBufferSizeInBytes = bufferSizeInBytes;
+ return this;
+ }
+
+ /**
+ * Sets the mode under which buffers of audio data are transferred from the
+ * {@link AudioTrack} to the framework.
+ * @param mode one of {@link AudioTrack#MODE_STREAM}, {@link AudioTrack#MODE_STATIC}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setTransferMode(@TransferMode int mode)
+ throws IllegalArgumentException {
+ switch(mode) {
+ case MODE_STREAM:
+ case MODE_STATIC:
+ mMode = mode;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid transfer mode " + mode);
+ }
+ return this;
+ }
+
+ /**
+ * Sets the session ID the {@link AudioTrack} will be attached to.
+ * @param sessionId a strictly positive ID number retrieved from another
+ * <code>AudioTrack</code> via {@link AudioTrack#getAudioSessionId()} or allocated by
+ * {@link AudioManager} via {@link AudioManager#generateAudioSessionId()}, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setSessionId(int sessionId)
+ throws IllegalArgumentException {
+ if ((sessionId != AudioManager.AUDIO_SESSION_ID_GENERATE) && (sessionId < 1)) {
+ throw new IllegalArgumentException("Invalid audio session ID " + sessionId);
+ }
+ mSessionId = sessionId;
+ return this;
+ }
+
+ /**
+ * Builds an {@link AudioTrack} instance initialized with all the parameters set
+ * on this <code>Builder</code>.
+ * @return a new {@link AudioTrack} instance.
+ * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
+ * were incompatible, or if they are not supported by the device.
+ */
+ public @NonNull AudioTrack build() throws UnsupportedOperationException {
+ if (mAttributes == null) {
+ mAttributes = new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_MEDIA)
+ .build();
+ }
+ if (mFormat == null) {
+ mFormat = new AudioFormat.Builder()
+ .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
+ .setSampleRate(AudioSystem.getPrimaryOutputSamplingRate())
+ .setEncoding(AudioFormat.ENCODING_DEFAULT)
+ .build();
+ }
+ try {
+ // If the buffer size is not specified in streaming mode,
+ // use a single frame for the buffer size and let the
+ // native code figure out the minimum buffer size.
+ if (mMode == MODE_STREAM && mBufferSizeInBytes == 0) {
+ mBufferSizeInBytes = mFormat.getChannelCount()
+ * mFormat.getBytesPerSample(mFormat.getEncoding());
+ }
+ return new AudioTrack(mAttributes, mFormat, mBufferSizeInBytes, mMode, mSessionId);
+ } catch (IllegalArgumentException e) {
+ throw new UnsupportedOperationException(e.getMessage());
+ }
+ }
+ }
+
// mask of all the channels supported by this implementation
private static final int SUPPORTED_OUT_CHANNELS =
AudioFormat.CHANNEL_OUT_FRONT_LEFT |
@@ -477,8 +709,8 @@ public class AudioTrack
// mAudioFormat is valid
// mSampleRate is valid
// mDataLoadMode is valid
- private void audioParamCheck(int sampleRateInHz,
- int channelConfig, int audioFormat, int mode) {
+ private void audioParamCheck(int sampleRateInHz, int channelConfig, int channelIndexMask,
+ int audioFormat, int mode) {
//--------------
// sample rate, note these values are subject to change
if (sampleRateInHz < SAMPLE_RATE_HZ_MIN || sampleRateInHz > SAMPLE_RATE_HZ_MAX) {
@@ -504,6 +736,10 @@ public class AudioTrack
mChannels = AudioFormat.CHANNEL_OUT_STEREO;
break;
default:
+ if (channelConfig == AudioFormat.CHANNEL_INVALID && channelIndexMask != 0) {
+ mChannelCount = 0;
+ break; // channel index configuration only
+ }
if (!isMultichannelConfigSupported(channelConfig)) {
// input channel configuration features unsupported channels
throw new IllegalArgumentException("Unsupported channel configuration.");
@@ -511,6 +747,22 @@ public class AudioTrack
mChannels = channelConfig;
mChannelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
}
+ // check the channel index configuration (if present)
+ mChannelIndexMask = channelIndexMask;
+ if (mChannelIndexMask != 0) {
+ // restrictive: indexMask could allow up to AUDIO_CHANNEL_BITS_LOG2
+ final int indexMask = (1 << CHANNEL_COUNT_MAX) - 1;
+ if ((channelIndexMask & ~indexMask) != 0) {
+ throw new IllegalArgumentException("Unsupported channel index configuration "
+ + channelIndexMask);
+ }
+ int channelIndexCount = Integer.bitCount(channelIndexMask);
+ if (mChannelCount == 0) {
+ mChannelCount = channelIndexCount;
+ } else if (mChannelCount != channelIndexCount) {
+ throw new IllegalArgumentException("Channel count must match");
+ }
+ }
//--------------
// audio format
@@ -655,15 +907,31 @@ public class AudioTrack
}
    /**
     * Returns the current playback sample rate in Hz.
     */
    public int getPlaybackRate() {
        // Queried from the native layer each call, since setPlaybackRate() can change it.
        return native_get_playback_rate();
    }
/**
- * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
- * and {@link AudioFormat#ENCODING_PCM_8BIT}.
+ * Returns the current playback settings.
+ * See {@link #setPlaybackSettings(PlaybackSettings)} to set playback settings
+ * @return current {@link PlaybackSettings}.
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public @NonNull PlaybackSettings getPlaybackSettings() {
+ float[] floatArray = new float[2];
+ int[] intArray = new int[2];
+ native_get_playback_settings(floatArray, intArray);
+ return new PlaybackSettings()
+ .setSpeed(floatArray[0])
+ .setPitch(floatArray[1])
+ .setAudioFallbackMode(intArray[0]);
+ }
+
    /**
     * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
     * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
     */
    public int getAudioFormat() {
        // Encoding value captured at construction time; one of the AudioFormat.ENCODING_*
        // constants.
        return mAudioFormat;
    }
}
    /**
     * Returns the configured channel position mask.
     * <p> For example, refer to {@link AudioFormat#CHANNEL_OUT_MONO},
     * {@link AudioFormat#CHANNEL_OUT_STEREO}, {@link AudioFormat#CHANNEL_OUT_5POINT1}.
     * This method may return {@link AudioFormat#CHANNEL_INVALID} if
     * a channel index mask is used. Consider
     * {@link #getFormat()} instead, to obtain an {@link AudioFormat},
     * which contains both the channel position mask and the channel index mask.
     */
    public int getChannelConfiguration() {
        // Position mask captured at construction; CHANNEL_INVALID when the track was
        // configured with a channel index mask only.
        return mChannelConfiguration;
    }
/**
+ * Returns the configured <code>AudioTrack</code> format.
+ * @return an {@link AudioFormat} containing the
+ * <code>AudioTrack</code> parameters at the time of configuration.
+ */
+ public @NonNull AudioFormat getFormat() {
+ AudioFormat.Builder builder = new AudioFormat.Builder()
+ .setSampleRate(mSampleRate)
+ .setEncoding(mAudioFormat);
+ if (mChannelConfiguration != AudioFormat.CHANNEL_INVALID) {
+ builder.setChannelMask(mChannelConfiguration);
+ }
+ if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
+ builder.setChannelIndexMask(mChannelIndexMask);
+ }
+ return builder.build();
+ }
+
+ /**
* Returns the configured number of channels.
*/
public int getChannelCount() {
@@ -721,19 +1011,22 @@ public class AudioTrack
}
/**
- * Returns the "native frame count", derived from the bufferSizeInBytes specified at
- * creation time and converted to frame units.
- * If track's creation mode is {@link #MODE_STATIC},
- * it is equal to the specified bufferSizeInBytes converted to frame units.
- * If track's creation mode is {@link #MODE_STREAM},
- * it is typically greater than or equal to the specified bufferSizeInBytes converted to frame
- * units; it may be rounded up to a larger value if needed by the target device implementation.
- * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
- * See {@link AudioManager#getProperty(String)} for key
+ * Returns the "native frame count" of the <code>AudioTrack</code> buffer.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
+ * it is equal to the specified bufferSizeInBytes on construction, converted to frame units.
+ * A static track's native frame count will not change.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * it is greater than or equal to the specified bufferSizeInBytes converted to frame units.
+ * For streaming tracks, this value may be rounded up to a larger value if needed by
+ * the target output sink, and
+ * if the track is subsequently routed to a different output sink, the native
+ * frame count may enlarge to accommodate.
+ * See also {@link AudioManager#getProperty(String)} for key
* {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
+ * @return current size in frames of the audio track buffer.
+ * @throws IllegalStateException
*/
- @Deprecated
- protected int getNativeFrameCount() {
    public int getNativeFrameCount() throws IllegalStateException {
        // Queried from the native layer on every call rather than cached: for streaming
        // tracks the buffer may be enlarged after a routing change (see javadoc above).
        return native_get_native_frame_count();
    }
@@ -820,8 +1113,7 @@ public class AudioTrack
channelCount = 2;
break;
default:
- if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
- // input channel configuration features unsupported channels
+ if (!isMultichannelConfigSupported(channelConfig)) {
loge("getMinBufferSize(): Invalid channel configuration.");
return ERROR_BAD_VALUE;
} else {
@@ -933,7 +1225,7 @@ public class AudioTrack
public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
Handler handler) {
if (listener != null) {
- mEventHandlerDelegate = new NativeEventHandlerDelegate(this, listener, handler);
+ mEventHandlerDelegate = new NativePositionEventHandlerDelegate(this, listener, handler);
} else {
mEventHandlerDelegate = null;
}
@@ -1014,6 +1306,7 @@ public class AudioTrack
* playback to last twice as long, but will also result in a pitch shift down by one octave.
* The valid sample rate range is from 1 Hz to twice the value returned by
* {@link #getNativeOutputSampleRate(int)}.
+ * Use {@link #setPlaybackSettings(PlaybackSettings)} for speed control.
* @param sampleRateInHz the sample rate expressed in Hz
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
@@ -1030,6 +1323,42 @@ public class AudioTrack
/**
+ * Sets the playback settings.
+ * This method returns failure if it cannot apply the playback settings.
+ * One possible cause is that the parameters for speed or pitch are out of range.
+ * Another possible cause is that the <code>AudioTrack</code> is streaming
+ * (see {@link #MODE_STREAM}) and the
+ * buffer size is too small. For speeds greater than 1.0f, the <code>AudioTrack</code> buffer
+ * on configuration must be larger than the speed multiplied by the minimum size
+ * {@link #getMinBufferSize(int, int, int)}) to allow proper playback.
+ * @param settings see {@link PlaybackSettings}. In particular,
+ * speed, pitch, and audio mode should be set.
+ * @throws IllegalArgumentException if the settings are invalid or not accepted.
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public void setPlaybackSettings(@NonNull PlaybackSettings settings) {
+ if (settings == null) {
+ throw new IllegalArgumentException("settings is null");
+ }
+ float[] floatArray;
+ int[] intArray;
+ try {
+ floatArray = new float[] {
+ settings.getSpeed(),
+ settings.getPitch(),
+ };
+ intArray = new int[] {
+ settings.getAudioFallbackMode(),
+ PlaybackSettings.AUDIO_STRETCH_MODE_DEFAULT,
+ };
+ } catch (IllegalStateException e) {
+ throw new IllegalArgumentException(e);
+ }
+ native_set_playback_settings(floatArray, intArray);
+ }
+
+
+ /**
* Sets the position of the notification marker. At most one marker can be active.
* @param markerInFrames marker position in wrapping frame units similar to
* {@link #getPlaybackHeadPosition}, or zero to disable the marker.
@@ -1071,7 +1400,8 @@ public class AudioTrack
* the position values have different meanings.
* <br>
* If looping is currently enabled and the new position is greater than or equal to the
- * loop end marker, the behavior varies by API level: for API level 22 and above,
+ * loop end marker, the behavior varies by API level:
+ * as of {@link android.os.Build.VERSION_CODES#MNC},
* the looping is first disabled and then the position is set.
* For earlier API levels, the behavior is unspecified.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
@@ -1108,7 +1438,7 @@ public class AudioTrack
* {@link #ERROR_BAD_VALUE} is returned.
* The loop range is the interval [startInFrames, endInFrames).
* <br>
- * For API level 22 and above, the position is left unchanged,
+ * As of {@link android.os.Build.VERSION_CODES#MNC}, the position is left unchanged,
* unless it is greater than or equal to the loop end marker, in which case
* it is forced to the loop start marker.
* For earlier API levels, the effect on position is unspecified.
@@ -1206,6 +1536,8 @@ public class AudioTrack
synchronized(mPlayStateLock) {
native_stop();
mPlayState = PLAYSTATE_STOPPED;
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
}
}
@@ -1250,6 +1582,8 @@ public class AudioTrack
if (mState == STATE_INITIALIZED) {
// flush the data in native layer
native_flush();
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
}
}
@@ -1257,6 +1591,8 @@ public class AudioTrack
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
* In streaming mode, will block until all data has been written to the audio sink.
* In static buffer mode, copies the data to the buffer starting at offset 0.
* Note that the actual playback of this data might occur after this function
@@ -1273,13 +1609,49 @@ public class AudioTrack
* {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
* needs to be recreated.
*/
    public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
        // Delegates to the four-argument overload with blocking semantics, preserving the
        // behavior this method has always had.
        return write(audioData, offsetInBytes, sizeInBytes, WRITE_BLOCKING);
    }
- public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) {
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * In streaming mode, will block until all data has been written to the audio sink.
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
+ * Note that the actual playback of this data might occur after this function
+ * returns. This function is thread safe with respect to {@link #stop} calls,
+ * in which case all of the specified data might not be written to the audio sink.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
+ * starts.
+ * @param sizeInBytes the number of bytes to read in audioData after the offset.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ */
+ public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
+ @WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
|| (offsetInBytes + sizeInBytes < 0) // detect integer overflow
|| (offsetInBytes + sizeInBytes > audioData.length)) {
@@ -1287,7 +1659,7 @@ public class AudioTrack
}
int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat,
- true /*isBlocking*/);
+ writeMode == WRITE_BLOCKING);
if ((mDataLoadMode == MODE_STATIC)
&& (mState == STATE_NO_STATIC_DATA)
@@ -1299,10 +1671,11 @@ public class AudioTrack
return ret;
}
-
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
* In streaming mode, will block until all data has been written to the audio sink.
* In static buffer mode, copies the data to the buffer starting at offset 0.
* Note that the actual playback of this data might occur after this function
@@ -1315,22 +1688,61 @@ public class AudioTrack
* @param sizeInShorts the number of shorts to read in audioData after the offset.
* @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
- * the parameters don't resolve to valid data and indexes.
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
*/
    public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
        // Delegates to the four-argument overload with blocking semantics, preserving the
        // behavior this method has always had.
        return write(audioData, offsetInShorts, sizeInShorts, WRITE_BLOCKING);
    }
- public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * In streaming mode, will block until all data has been written to the audio sink.
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
+ * Note that the actual playback of this data might occur after this function
+ * returns. This function is thread safe with respect to {@link #stop} calls,
+ * in which case all of the specified data might not be written to the audio sink.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
+ * starts.
+ * @param sizeInShorts the number of shorts to read in audioData after the offset.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ */
+ public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
+ @WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
return ERROR_INVALID_OPERATION;
}
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
|| (offsetInShorts + sizeInShorts < 0) // detect integer overflow
|| (offsetInShorts + sizeInShorts > audioData.length)) {
return ERROR_BAD_VALUE;
}
- int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
+ int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
if ((mDataLoadMode == MODE_STATIC)
&& (mState == STATE_NO_STATIC_DATA)
@@ -1342,10 +1754,11 @@ public class AudioTrack
return ret;
}
-
/**
* Writes the audio data to the audio sink for playback (streaming mode),
* or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
* In static buffer mode, copies the data to the buffer starting at offset 0,
* and the write mode is ignored.
* In streaming mode, the blocking behavior will depend on the write mode.
@@ -1369,15 +1782,17 @@ public class AudioTrack
* @param sizeInFloats the number of floats to read in audioData after the offset.
* @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
* effect in static mode.
- * <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
* to the audio sink.
- * <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
* queuing as much audio data for playback as possible without blocking.
* @return the number of floats that were written, or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
- * the parameters don't resolve to valid data and indexes.
+ * the parameters don't resolve to valid data and indexes, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
*/
- public int write(float[] audioData, int offsetInFloats, int sizeInFloats,
+ public int write(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
@WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED) {
@@ -1436,9 +1851,11 @@ public class AudioTrack
* <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
* queuing as much audio data for playback as possible without blocking.
* @return 0 or a positive number of bytes that were written, or
- * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}
+ * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
*/
- public int write(ByteBuffer audioData, int sizeInBytes,
+ public int write(@NonNull ByteBuffer audioData, int sizeInBytes,
@WriteMode int writeMode) {
if (mState == STATE_UNINITIALIZED) {
@@ -1483,17 +1900,97 @@ public class AudioTrack
}
/**
+ * Writes the audio data to the audio sink for playback (streaming mode) on a HW_AV_SYNC track.
+ * In streaming mode, the blocking behavior will depend on the write mode.
+ * @param audioData the buffer that holds the data to play, starting at the position reported
+ * by <code>audioData.position()</code>.
+ * <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
+ * have been advanced to reflect the amount of data that was successfully written to
+ * the AudioTrack.
+ * @param sizeInBytes number of bytes to write.
+ * <BR>Note this may differ from <code>audioData.remaining()</code>, but cannot exceed it.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}.
+ * <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @param timestamp The timestamp of the first decodable audio frame in the provided audioData.
+ * @return 0 or a positive number of bytes that were written, or
+ * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}, or
+ * {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ */
    public int write(ByteBuffer audioData, int sizeInBytes,
            @WriteMode int writeMode, long timestamp) {

        // Tracks without FLAG_HW_AV_SYNC carry no timestamp header protocol; fall back to
        // the plain ByteBuffer write and drop the pts.
        if ((mAttributes.getFlags() & AudioAttributes.FLAG_HW_AV_SYNC) == 0) {
            Log.d(TAG, "AudioTrack.write() called on a regular AudioTrack. Ignoring pts...");
            return write(audioData, sizeInBytes, writeMode);
        }

        if ((audioData == null) || (sizeInBytes < 0) || (sizeInBytes > audioData.remaining())) {
            Log.e(TAG, "AudioTrack.write() called with invalid size (" + sizeInBytes + ") value");
            return ERROR_BAD_VALUE;
        }

        // create timestamp header if none exists
        // Header layout (16 bytes, big-endian): 4-byte magic 0x55550001, 4-byte payload
        // size, 8-byte timestamp. mAvSyncBytesRemaining counts the payload still owed for
        // this header; a non-null mAvSyncHeader means a frame is in progress, so the
        // timestamp argument of follow-up calls for the same frame is ignored.
        if (mAvSyncHeader == null) {
            mAvSyncHeader = ByteBuffer.allocate(16);
            mAvSyncHeader.order(ByteOrder.BIG_ENDIAN);
            mAvSyncHeader.putInt(0x55550001);
            mAvSyncHeader.putInt(sizeInBytes);
            mAvSyncHeader.putLong(timestamp);
            mAvSyncHeader.position(0);
            mAvSyncBytesRemaining = sizeInBytes;
        }

        // write timestamp header if not completely written already
        int ret = 0;
        if (mAvSyncHeader.remaining() != 0) {
            ret = write(mAvSyncHeader, mAvSyncHeader.remaining(), writeMode);
            if (ret < 0) {
                // Abandon the in-progress frame on error so the next call starts fresh.
                Log.e(TAG, "AudioTrack.write() could not write timestamp header!");
                mAvSyncHeader = null;
                mAvSyncBytesRemaining = 0;
                return ret;
            }
            if (mAvSyncHeader.remaining() > 0) {
                // Short (e.g. non-blocking) write: report 0 payload bytes and resume the
                // header on the next call.
                Log.v(TAG, "AudioTrack.write() partial timestamp header written.");
                return 0;
            }
        }

        // write audio data
        // Never write past the payload size declared in the current header.
        int sizeToWrite = Math.min(mAvSyncBytesRemaining, sizeInBytes);
        ret = write(audioData, sizeToWrite, writeMode);
        if (ret < 0) {
            Log.e(TAG, "AudioTrack.write() could not write audio data!");
            mAvSyncHeader = null;
            mAvSyncBytesRemaining = 0;
            return ret;
        }

        mAvSyncBytesRemaining -= ret;
        // Frame complete: clear the header so the next call emits a new one.
        if (mAvSyncBytesRemaining == 0) {
            mAvSyncHeader = null;
        }

        return ret;
    }
+
+
+ /**
* Sets the playback head position within the static buffer to zero,
* that is it rewinds to start of static buffer.
* The track must be stopped or paused, and
* the track's creation mode must be {@link #MODE_STATIC}.
* <p>
- * For API level 22 and above, also resets the value returned by
+ * As of {@link android.os.Build.VERSION_CODES#MNC}, also resets the value returned by
* {@link #getPlaybackHeadPosition()} to zero.
* For earlier API levels, the reset behavior is unspecified.
* <p>
- * {@link #setPlaybackHeadPosition(int)} to zero
- * is recommended instead when the reset of {@link #getPlaybackHeadPosition} is not needed.
+ * Use {@link #setPlaybackHeadPosition(int)} with a zero position
+ * if the reset of <code>getPlaybackHeadPosition()</code> is not needed.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -1562,6 +2059,99 @@ public class AudioTrack
return err == 0 ? SUCCESS : ERROR;
}
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
+ private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the output from this AudioTrack.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio sink.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio output device.
+ */
+ public boolean setPreferredOutputDevice(AudioDeviceInfo deviceInfo) {
+ // Do some validation....
+ if (deviceInfo != null && !deviceInfo.isSink()) {
+ return false;
+ }
+
+ mPreferredDevice = deviceInfo;
+ int preferredDeviceId = mPreferredDevice != null ? deviceInfo.getId() : 0;
+
+ return native_setOutputDevice(preferredDeviceId);
+ }
+
+ /**
+ * Returns the selected output specified by {@link #setPreferredOutputDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for playback.
+ */
+ public AudioDeviceInfo getPreferredOutputDevice() {
+ return mPreferredDevice;
+ }
+
+ //--------------------------------------------------------------------------
+ // (Re)Routing Info
+ //--------------------
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioTrack.
+ */
+ public AudioDeviceInfo getRoutedDevice() {
+ return null;
+ }
+
+ /**
+ * The message sent to apps when the routing of this AudioTrack changes if they provide
+ * a {@link Handler} object to addOnAudioTrackRoutingListener().
+ */
+ private ArrayMap<OnAudioTrackRoutingListener, NativeRoutingEventHandlerDelegate>
+ mRoutingChangeListeners =
+ new ArrayMap<OnAudioTrackRoutingListener, NativeRoutingEventHandlerDelegate>();
+
+ /**
+ * Adds an {@link OnAudioTrackRoutingListener} to receive notifications of routing changes
+ * on this AudioTrack.
+ */
+ public void addOnAudioTrackRoutingListener(OnAudioTrackRoutingListener listener,
+ android.os.Handler handler) {
+ if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+ synchronized (mRoutingChangeListeners) {
+ mRoutingChangeListeners.put(
+ listener, new NativeRoutingEventHandlerDelegate(this, listener, handler));
+ }
+ }
+ }
+
+ /**
+ * Removes an {@link OnAudioTrackRoutingListener} which has been previously added
+ * to receive notifications of changes to the set of connected audio devices.
+ */
+ public void removeOnAudioTrackRoutingListener(OnAudioTrackRoutingListener listener) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.containsKey(listener)) {
+ mRoutingChangeListeners.remove(listener);
+ }
+ }
+ }
+
+ /**
+ * Sends device list change notification to all listeners.
+ */
+ private void broadcastRoutingChange() {
+ Collection<NativeRoutingEventHandlerDelegate> values;
+ synchronized (mRoutingChangeListeners) {
+ values = mRoutingChangeListeners.values();
+ }
+ for(NativeRoutingEventHandlerDelegate delegate : values) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ handler.sendEmptyMessage(NATIVE_EVENT_ROUTING_CHANGE);
+ }
+ }
+ }
+
//---------------------------------------------------------
// Interface definitions
//--------------------
@@ -1590,10 +2180,10 @@ public class AudioTrack
* Helper class to handle the forwarding of native events to the appropriate listener
* (potentially) handled in a different thread
*/
- private class NativeEventHandlerDelegate {
+ private class NativePositionEventHandlerDelegate {
private final Handler mHandler;
- NativeEventHandlerDelegate(final AudioTrack track,
+ NativePositionEventHandlerDelegate(final AudioTrack track,
final OnPlaybackPositionUpdateListener listener,
Handler handler) {
// find the looper for our new event handler
@@ -1641,6 +2231,55 @@ public class AudioTrack
}
}
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread
+ */
+ private class NativeRoutingEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativeRoutingEventHandlerDelegate(final AudioTrack track,
+ final OnAudioTrackRoutingListener listener,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no given handler, use the looper the AudioTrack was created in
+ looper = mInitializationLooper;
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ if (track == null) {
+ return;
+ }
+ switch(msg.what) {
+ case NATIVE_EVENT_ROUTING_CHANGE:
+ if (listener != null) {
+ listener.onAudioTrackRouting(track);
+ }
+ break;
+ default:
+ loge("Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
+ }
+ }
//---------------------------------------------------------
// Java methods called from the native side
@@ -1654,7 +2293,7 @@ public class AudioTrack
return;
}
- NativeEventHandlerDelegate delegate = track.mEventHandlerDelegate;
+ NativePositionEventHandlerDelegate delegate = track.mEventHandlerDelegate;
if (delegate != null) {
Handler handler = delegate.getHandler();
if (handler != null) {
@@ -1675,7 +2314,7 @@ public class AudioTrack
// AudioAttributes.USAGE_MEDIA will map to AudioManager.STREAM_MUSIC
private native final int native_setup(Object /*WeakReference<AudioTrack>*/ audiotrack_this,
Object /*AudioAttributes*/ attributes,
- int sampleRate, int channelMask, int audioFormat,
+ int sampleRate, int channelMask, int channelIndexMask, int audioFormat,
int buffSizeInBytes, int mode, int[] sessionId);
private native final void native_finalize();
@@ -1695,7 +2334,8 @@ public class AudioTrack
boolean isBlocking);
private native final int native_write_short(short[] audioData,
- int offsetInShorts, int sizeInShorts, int format);
+ int offsetInShorts, int sizeInShorts, int format,
+ boolean isBlocking);
private native final int native_write_float(float[] audioData,
int offsetInFloats, int sizeInFloats, int format,
@@ -1713,6 +2353,15 @@ public class AudioTrack
private native final int native_set_playback_rate(int sampleRateInHz);
private native final int native_get_playback_rate();
+ // floatArray must be a non-null array of length >= 2
+ // [0] is speed
+ // [1] is pitch
+ // intArray must be a non-null array of length >= 2
+ // [0] is audio fallback mode
+ // [1] is audio stretch mode
+ private native final void native_set_playback_settings(float[] floatArray, int[] intArray);
+ private native final void native_get_playback_settings(float[] floatArray, int[] intArray);
+
private native final int native_set_marker_pos(int marker);
private native final int native_get_marker_pos();
@@ -1738,6 +2387,8 @@ public class AudioTrack
private native final int native_attachAuxEffect(int effectId);
private native final int native_setAuxEffectSendLevel(float level);
+ private native final boolean native_setOutputDevice(int deviceId);
+
//---------------------------------------------------------
// Utility methods
//------------------
@@ -1749,5 +2400,4 @@ public class AudioTrack
private static void loge(String msg) {
Log.e(TAG, msg);
}
-
}
diff --git a/media/java/android/media/DataSource.java b/media/java/android/media/DataSource.java
deleted file mode 100644
index 347bd5f..0000000
--- a/media/java/android/media/DataSource.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package android.media;
-
-import java.io.Closeable;
-
-/**
- * An abstraction for a media data source, e.g. a file or an http stream
- * {@hide}
- */
-public interface DataSource extends Closeable {
- /**
- * Reads data from the data source at the requested position
- *
- * @param offset where in the source to read
- * @param buffer the buffer to read the data into
- * @param size how many bytes to read
- * @return the number of bytes read, or -1 if there was an error
- */
- public int readAt(long offset, byte[] buffer, int size);
-
- /**
- * Gets the size of the data source.
- *
- * @return size of data source, or -1 if the length is unknown
- */
- public long getSize();
-}
diff --git a/media/java/android/media/Image.java b/media/java/android/media/Image.java
index 53ab264..195c987 100644
--- a/media/java/android/media/Image.java
+++ b/media/java/android/media/Image.java
@@ -50,10 +50,25 @@ public abstract class Image implements AutoCloseable {
/**
* @hide
*/
+ protected boolean mIsImageValid = false;
+
+ /**
+ * @hide
+ */
protected Image() {
}
/**
+ * Throw IllegalStateException if the image is invalid (already closed).
+ *
+ * @hide
+ */
+ protected void throwISEIfImageIsInvalid() {
+ if (!mIsImageValid) {
+ throw new IllegalStateException("Image is already closed");
+ }
+ }
+ /**
* Get the format for this image. This format determines the number of
* ByteBuffers needed to represent the image, and the general layout of the
* pixel data in each in ByteBuffer.
@@ -86,6 +101,38 @@ public abstract class Image implements AutoCloseable {
* Each plane has its own row stride and pixel stride.</td>
* </tr>
* <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have half the width and the full height of the luminance
+ * plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have the same width and height as that of the luminance
+ * plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
+ * <td>3</td>
+ * <td>A R (red) plane followed by the G (green) and B (blue) planes.
+ * All planes have the same widths and heights.
+ * Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
+ * <td>4</td>
+ * <td>A R (red) plane followed by the G (green), B (blue), and
+ * A (alpha) planes. All planes have the same widths and heights.
+ * Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
* <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
* <td>1</td>
* <td>A single plane of raw sensor image data, with 16 bits per color
@@ -115,14 +162,51 @@ public abstract class Image implements AutoCloseable {
/**
* Get the timestamp associated with this frame.
* <p>
- * The timestamp is measured in nanoseconds, and is monotonically
- * increasing. However, the zero point and whether the timestamp can be
- * compared against other sources of time or images depend on the source of
- * this image.
+ * The timestamp is measured in nanoseconds, and is normally monotonically
+ * increasing. However, the behavior of the timestamp depends on the source
+ * of this image. See {@link android.hardware.Camera Camera},
+ * {@link android.hardware.camera2.CameraDevice CameraDevice}, {@link MediaPlayer} and
+ * {@link MediaCodec} for more details.
* </p>
*/
public abstract long getTimestamp();
+ /**
+ * Set the timestamp associated with this frame.
+ * <p>
+ * The timestamp is measured in nanoseconds, and is normally monotonically
+ * increasing. However, the behavior of the timestamp depends on
+ * the destination of this image. See {@link android.hardware.Camera Camera}
+ * , {@link android.hardware.camera2.CameraDevice CameraDevice},
+ * {@link MediaPlayer} and {@link MediaCodec} for more details.
+ * </p>
+ * <p>
+ * For images dequeued from {@link ImageWriter} via
+ * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
+ * set the timestamps correctly before sending them back to the
+ * {@link ImageWriter}, or the timestamp will be generated automatically when
+ * {@link ImageWriter#queueInputImage queueInputImage()} is called.
+ * </p>
+ *
+ * @param timestamp The timestamp to be set for this image.
+ */
+ public void setTimestamp(long timestamp) {
+ throwISEIfImageIsInvalid();
+ return;
+ }
+
+ /**
+ * <p>Check if the image is opaque.</p>
+ *
+ * <p>The pixel data of opaque images are not accessible to the application,
+ * and therefore {@link #getPlanes} will return an empty array for an opaque image.
+ * </p>
+ */
+ public boolean isOpaque() {
+ throwISEIfImageIsInvalid();
+ return false;
+ }
+
private Rect mCropRect;
/**
@@ -132,6 +216,8 @@ public abstract class Image implements AutoCloseable {
* using coordinates in the largest-resolution plane.
*/
public Rect getCropRect() {
+ throwISEIfImageIsInvalid();
+
if (mCropRect == null) {
return new Rect(0, 0, getWidth(), getHeight());
} else {
@@ -146,6 +232,8 @@ public abstract class Image implements AutoCloseable {
* using coordinates in the largest-resolution plane.
*/
public void setCropRect(Rect cropRect) {
+ throwISEIfImageIsInvalid();
+
if (cropRect != null) {
cropRect = new Rect(cropRect); // make a copy
cropRect.intersect(0, 0, getWidth(), getHeight());
@@ -155,7 +243,10 @@ public abstract class Image implements AutoCloseable {
/**
* Get the array of pixel planes for this Image. The number of planes is
- * determined by the format of the Image.
+ * determined by the format of the Image. The application will get an
+ * empty array if the image is opaque because the opaque image pixel data
+ * is not directly accessible. The application can check if an image is
+ * opaque by calling {@link Image#isOpaque}.
*/
public abstract Plane[] getPlanes();
@@ -164,14 +255,76 @@ public abstract class Image implements AutoCloseable {
* <p>
* After calling this method, calling any methods on this {@code Image} will
* result in an {@link IllegalStateException}, and attempting to read from
- * {@link ByteBuffer ByteBuffers} returned by an earlier
- * {@link Plane#getBuffer} call will have undefined behavior.
+ * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Plane#getBuffer} call will have undefined behavior. If the image
+ * was obtained from {@link ImageWriter} via
+ * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
+ * image data filled by the application will be lost and the image will be
+ * returned to {@link ImageWriter} for reuse. Images given to
+ * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
+ * closed.
* </p>
*/
@Override
public abstract void close();
/**
+ * <p>
+ * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
+ * </p>
+ * <p>
+ * This is a package private method that is only used internally.
+ * </p>
+ *
+ * @return true if the image is attachable to a new owner, false if the image is still attached
+ * to its current owner, or the image is a stand-alone image and is not attachable to
+ * a new owner.
+ */
+ boolean isAttachable() {
+ throwISEIfImageIsInvalid();
+
+ return false;
+ }
+
+ /**
+ * <p>
+ * Get the owner of the {@link Image}.
+ * </p>
+ * <p>
+ * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
+ * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
+ * if the image is stand-alone image or the owner is unknown.
+ * </p>
+ * <p>
+ * This is a package private method that is only used internally.
+ * </p>
+ *
+ * @return The owner of the Image.
+ */
+ Object getOwner() {
+ throwISEIfImageIsInvalid();
+
+ return null;
+ }
+
+ /**
+ * Get native context (buffer pointer) associated with this image.
+ * <p>
+ * This is a package private method that is only used internally. It can be
+ * used to get the native buffer pointer and passed to native, which may be
+ * passed to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
+ * JNI call.
+ * </p>
+ *
+ * @return native context associated with this Image.
+ */
+ long getNativeContext() {
+ throwISEIfImageIsInvalid();
+
+ return 0;
+ }
+
+ /**
* <p>A single color plane of image data.</p>
*
* <p>The number and meaning of the planes in an Image are determined by the
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index 18ffe12..6d30208 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -27,6 +27,7 @@ import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.NioUtils;
+import java.util.concurrent.atomic.AtomicBoolean;
/**
* <p>The ImageReader class allows direct application access to image data
@@ -34,7 +35,7 @@ import java.nio.NioUtils;
*
* <p>Several Android media API classes accept Surface objects as targets to
* render to, including {@link MediaPlayer}, {@link MediaCodec},
- * {@link android.hardware.camera2.CameraDevice}, and
+ * {@link android.hardware.camera2.CameraDevice}, {@link ImageWriter} and
* {@link android.renderscript.Allocation RenderScript Allocations}. The image
* sizes and formats that can be used with each source vary, and should be
* checked in the documentation for the specific API.</p>
@@ -97,10 +98,60 @@ public class ImageReader implements AutoCloseable {
* @see Image
*/
public static ImageReader newInstance(int width, int height, int format, int maxImages) {
+ if (format == ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException("To obtain an opaque ImageReader, please use"
+ + " newOpaqueInstance rather than newInstance");
+ }
return new ImageReader(width, height, format, maxImages);
}
/**
+ * <p>
+ * Create a new opaque reader for images of the desired size.
+ * </p>
+ * <p>
+ * An opaque {@link ImageReader} produces images that are not directly
+ * accessible by the application. The application can still acquire images
+ * from an opaque image reader, and send them to the
+ * {@link android.hardware.camera2.CameraDevice camera} for reprocessing via
+ * {@link ImageWriter} interface. However, the {@link Image#getPlanes()
+ * getPlanes()} will return an empty array for opaque images. The
+ * application can check if an existing reader is an opaque reader by
+ * calling {@link #isOpaque()}.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+ * {@link Image} objects that can be be acquired from the
+ * {@code ImageReader} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary.
+ * </p>
+ * <p>
+ * The valid sizes and formats depend on the source of the image data.
+ * </p>
+ * <p>
+ * Opaque ImageReaders are more efficient to use when application access to
+ * image data is not necessary, compared to ImageReaders using a non-opaque
+ * format such as {@link ImageFormat#YUV_420_888 YUV_420_888}.
+ * </p>
+ *
+ * @param width The default width in pixels of the Images that this reader
+ * will produce.
+ * @param height The default height in pixels of the Images that this reader
+ * will produce.
+ * @param maxImages The maximum number of images the user will want to
+ * access simultaneously. This should be as small as possible to
+ * limit memory use. Once maxImages Images are obtained by the
+ * user, one of them has to be released before a new Image will
+ * become available for access through
+ * {@link #acquireLatestImage()} or {@link #acquireNextImage()}.
+ * Must be greater than 0.
+ * @see Image
+ */
+ public static ImageReader newOpaqueInstance(int width, int height, int maxImages) {
+ return new ImageReader(width, height, ImageFormat.PRIVATE, maxImages);
+ }
+
+ /**
* @hide
*/
protected ImageReader(int width, int height, int format, int maxImages) {
@@ -197,6 +248,23 @@ public class ImageReader implements AutoCloseable {
}
/**
+ * <p>
+ * Check if the {@link ImageReader} is an opaque reader.
+ * </p>
+ * <p>
+ * An opaque image reader produces opaque images, see {@link Image#isOpaque}
+ * for more details.
+ * </p>
+ *
+ * @return true if the ImageReader is opaque.
+ * @see Image#isOpaque
+ * @see ImageReader#newOpaqueInstance
+ */
+ public boolean isOpaque() {
+ return mFormat == ImageFormat.PRIVATE;
+ }
+
+ /**
* <p>Get a {@link Surface} that can be used to produce {@link Image Images} for this
* {@code ImageReader}.</p>
*
@@ -275,7 +343,7 @@ public class ImageReader implements AutoCloseable {
* @hide
*/
public Image acquireNextImageNoThrowISE() {
- SurfaceImage si = new SurfaceImage();
+ SurfaceImage si = new SurfaceImage(mFormat);
return acquireNextSurfaceImage(si) == ACQUIRE_SUCCESS ? si : null;
}
@@ -300,7 +368,7 @@ public class ImageReader implements AutoCloseable {
switch (status) {
case ACQUIRE_SUCCESS:
si.createSurfacePlanes();
- si.setImageValid(true);
+ si.mIsImageValid = true;
case ACQUIRE_NO_BUFS:
case ACQUIRE_MAX_IMAGES:
break;
@@ -340,7 +408,9 @@ public class ImageReader implements AutoCloseable {
* @see #acquireLatestImage
*/
public Image acquireNextImage() {
- SurfaceImage si = new SurfaceImage();
+ // Initialize with reader format, but can be overwritten by native if the image
+ // format is different from the reader format.
+ SurfaceImage si = new SurfaceImage(mFormat);
int status = acquireNextSurfaceImage(si);
switch (status) {
@@ -374,7 +444,7 @@ public class ImageReader implements AutoCloseable {
si.clearSurfacePlanes();
nativeReleaseImage(i);
- si.setImageValid(false);
+ si.mIsImageValid = false;
}
/**
@@ -457,6 +527,57 @@ public class ImageReader implements AutoCloseable {
}
/**
+ * <p>
+ * Remove the ownership of this image from the ImageReader.
+ * </p>
+ * <p>
+ * After this call, the ImageReader no longer owns this image, and the image
+ * ownership can be transferred to another entity like {@link ImageWriter}
+ * via {@link ImageWriter#queueInputImage}. It's up to the new owner to
+ * release the resources held by this image. For example, if the ownership
+ * of this image is transferred to an {@link ImageWriter}, the image will be
+ * freed by the ImageWriter after the image data consumption is done.
+ * </p>
+ * <p>
+ * This method can be used to achieve zero buffer copy for use cases like
+ * {@link android.hardware.camera2.CameraDevice Camera2 API} OPAQUE and YUV
+ * reprocessing, where the application can select an output image from
+ * {@link ImageReader} and transfer this image directly to
+ * {@link ImageWriter}, where this image can be consumed by camera directly.
+ * For OPAQUE reprocessing, this is the only way to send input buffers to
+ * the {@link android.hardware.camera2.CameraDevice camera} for
+ * reprocessing.
+ * </p>
+ * <p>
+ * This is a package private method that is only used internally.
+ * </p>
+ *
+ * @param image The image to be detached from this ImageReader.
+ * @throws IllegalStateException If the ImageReader or image have been
+ * closed, or the has been detached, or has not yet been
+ * acquired.
+ */
+ void detachImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("input image must not be null");
+ }
+ if (!isImageOwnedbyMe(image)) {
+ throw new IllegalArgumentException("Trying to detach an image that is not owned by"
+ + " this ImageReader");
+ }
+
+ SurfaceImage si = (SurfaceImage) image;
+ si.throwISEIfImageIsInvalid();
+
+ if (si.isAttachable()) {
+ throw new IllegalStateException("Image was already detached from this ImageReader");
+ }
+
+ nativeDetachImage(image);
+ si.setDetached(true);
+ }
+
+ /**
* Only a subset of the formats defined in
* {@link android.graphics.ImageFormat ImageFormat} and
* {@link android.graphics.PixelFormat PixelFormat} are supported by
@@ -487,12 +608,22 @@ public class ImageReader implements AutoCloseable {
case ImageFormat.DEPTH16:
case ImageFormat.DEPTH_POINT_CLOUD:
return 1;
+ case ImageFormat.PRIVATE:
+ return 0;
default:
throw new UnsupportedOperationException(
String.format("Invalid format specified %d", mFormat));
}
}
+ private boolean isImageOwnedbyMe(Image image) {
+ if (!(image instanceof SurfaceImage)) {
+ return false;
+ }
+ SurfaceImage si = (SurfaceImage) image;
+ return si.getReader() == this;
+ }
+
/**
* Called from Native code when an Event happens.
*
@@ -554,8 +685,8 @@ public class ImageReader implements AutoCloseable {
}
private class SurfaceImage extends android.media.Image {
- public SurfaceImage() {
- mIsImageValid = false;
+ public SurfaceImage(int format) {
+ mFormat = format;
}
@Override
@@ -571,56 +702,53 @@ public class ImageReader implements AutoCloseable {
@Override
public int getFormat() {
- if (mIsImageValid) {
- return ImageReader.this.mFormat;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ return mFormat;
}
@Override
public int getWidth() {
- if (mIsImageValid) {
- if (mWidth == -1) {
- mWidth = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getWidth() :
- nativeGetWidth();
- }
- return mWidth;
- } else {
- throw new IllegalStateException("Image is already released");
+ throwISEIfImageIsInvalid();
+ if (mWidth == -1) {
+ mWidth = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getWidth() :
+ nativeGetWidth(mFormat);
}
+ return mWidth;
}
@Override
public int getHeight() {
- if (mIsImageValid) {
- if (mHeight == -1) {
- mHeight = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getHeight() :
- nativeGetHeight();
- }
- return mHeight;
- } else {
- throw new IllegalStateException("Image is already released");
+ throwISEIfImageIsInvalid();
+ if (mHeight == -1) {
+ mHeight = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getHeight() :
+ nativeGetHeight(mFormat);
}
+ return mHeight;
}
@Override
public long getTimestamp() {
- if (mIsImageValid) {
- return mTimestamp;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ return mTimestamp;
+ }
+
+ @Override
+ public void setTimestamp(long timestampNs) {
+ throwISEIfImageIsInvalid();
+ mTimestamp = timestampNs;
}
@Override
public Plane[] getPlanes() {
- if (mIsImageValid) {
- // Shallow copy is fine.
- return mPlanes.clone();
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ throwISEIfImageIsInvalid();
+ // Shallow copy is fine.
+ return mPlanes.clone();
+ }
+
+ @Override
+ public boolean isOpaque() {
+ throwISEIfImageIsInvalid();
+ return mFormat == ImageFormat.PRIVATE;
}
@Override
@@ -632,12 +760,27 @@ public class ImageReader implements AutoCloseable {
}
}
- private void setImageValid(boolean isValid) {
- mIsImageValid = isValid;
+ @Override
+ boolean isAttachable() {
+ throwISEIfImageIsInvalid();
+ return mIsDetached.get();
}
- private boolean isImageValid() {
- return mIsImageValid;
+ @Override
+ ImageReader getOwner() {
+ throwISEIfImageIsInvalid();
+ return ImageReader.this;
+ }
+
+ @Override
+ long getNativeContext() {
+ throwISEIfImageIsInvalid();
+ return mNativeBuffer;
+ }
+
+ private void setDetached(boolean detached) {
+ throwISEIfImageIsInvalid();
+ mIsDetached.getAndSet(detached);
}
private void clearSurfacePlanes() {
@@ -667,9 +810,7 @@ public class ImageReader implements AutoCloseable {
@Override
public ByteBuffer getBuffer() {
- if (SurfaceImage.this.isImageValid() == false) {
- throw new IllegalStateException("Image is already released");
- }
+ SurfaceImage.this.throwISEIfImageIsInvalid();
if (mBuffer != null) {
return mBuffer;
} else {
@@ -683,20 +824,14 @@ public class ImageReader implements AutoCloseable {
@Override
public int getPixelStride() {
- if (SurfaceImage.this.isImageValid()) {
- return mPixelStride;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ SurfaceImage.this.throwISEIfImageIsInvalid();
+ return mPixelStride;
}
@Override
public int getRowStride() {
- if (SurfaceImage.this.isImageValid()) {
- return mRowStride;
- } else {
- throw new IllegalStateException("Image is already released");
- }
+ SurfaceImage.this.throwISEIfImageIsInvalid();
+ return mRowStride;
}
private void clearBuffer() {
@@ -723,7 +858,7 @@ public class ImageReader implements AutoCloseable {
* This field is used to keep track of native object and used by native code only.
* Don't modify.
*/
- private long mLockedBuffer;
+ private long mNativeBuffer;
/**
* This field is set by native code during nativeImageSetup().
@@ -731,14 +866,16 @@ public class ImageReader implements AutoCloseable {
private long mTimestamp;
private SurfacePlane[] mPlanes;
- private boolean mIsImageValid;
private int mHeight = -1;
private int mWidth = -1;
+ private int mFormat = ImageFormat.UNKNOWN;
+ // If this image is detached from the ImageReader.
+ private AtomicBoolean mIsDetached = new AtomicBoolean(false);
private synchronized native ByteBuffer nativeImageGetBuffer(int idx, int readerFormat);
private synchronized native SurfacePlane nativeCreatePlane(int idx, int readerFormat);
- private synchronized native int nativeGetWidth();
- private synchronized native int nativeGetHeight();
+ private synchronized native int nativeGetWidth(int format);
+ private synchronized native int nativeGetHeight(int format);
}
private synchronized native void nativeInit(Object weakSelf, int w, int h,
@@ -746,6 +883,7 @@ public class ImageReader implements AutoCloseable {
private synchronized native void nativeClose();
private synchronized native void nativeReleaseImage(Image i);
private synchronized native Surface nativeGetSurface();
+ private synchronized native int nativeDetachImage(Image i);
/**
* @return A return code {@code ACQUIRE_*}
diff --git a/media/java/android/media/ImageUtils.java b/media/java/android/media/ImageUtils.java
new file mode 100644
index 0000000..89313bf
--- /dev/null
+++ b/media/java/android/media/ImageUtils.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.media.Image.Plane;
+import android.util.Size;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Package private utility class for hosting commonly used Image related methods.
+ */
+class ImageUtils {
+
+ /**
+ * Only a subset of the formats defined in
+ * {@link android.graphics.ImageFormat ImageFormat} and
+ * {@link android.graphics.PixelFormat PixelFormat} are supported by
+ * ImageReader. When reading RGB data from a surface, the formats defined in
+ * {@link android.graphics.PixelFormat PixelFormat} can be used; when
+ * reading YUV, JPEG or raw sensor data (for example, from the camera or video
+ * decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
+ * are used.
+ */
+ public static int getNumPlanesForFormat(int format) {
+ switch (format) {
+ case ImageFormat.YV12:
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.NV21:
+ return 3;
+ case ImageFormat.NV16:
+ return 2;
+ case PixelFormat.RGB_565:
+ case PixelFormat.RGBA_8888:
+ case PixelFormat.RGBX_8888:
+ case PixelFormat.RGB_888:
+ case ImageFormat.JPEG:
+ case ImageFormat.YUY2:
+ case ImageFormat.Y8:
+ case ImageFormat.Y16:
+ case ImageFormat.RAW_SENSOR:
+ case ImageFormat.RAW10:
+ return 1;
+ case PixelFormat.OPAQUE:
+ return 0;
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Invalid format specified %d", format));
+ }
+ }
+
+ /**
+ * <p>
+ * Copy source image data to destination Image.
+ * </p>
+ * <p>
+ * Only copies between two non-opaque images with the same properties
+ * (format, size, etc.) are supported. The data from the source image will be copied to
+ * the byteBuffers from the destination Image starting from position zero,
+ * and the destination image will be rewound to zero after copy is done.
+ * </p>
+ *
+ * @param src The source image to be copied from.
+ * @param dst The destination image to be copied to.
+ * @throws IllegalArgumentException If the source and destination images
+ * have different format, or one of the images is not copyable.
+ */
+ public static void imageCopy(Image src, Image dst) {
+ if (src == null || dst == null) {
+ throw new IllegalArgumentException("Images should be non-null");
+ }
+ if (src.getFormat() != dst.getFormat()) {
+ throw new IllegalArgumentException("Src and dst images should have the same format");
+ }
+ if (src.isOpaque() || dst.isOpaque()) {
+ throw new IllegalArgumentException("Opaque image is not copyable");
+ }
+ if (!(dst.getOwner() instanceof ImageWriter)) {
+ throw new IllegalArgumentException("Destination image is not from ImageWriter. Only"
+ + " the images from ImageWriter are writable");
+ }
+ Size srcSize = new Size(src.getWidth(), src.getHeight());
+ Size dstSize = new Size(dst.getWidth(), dst.getHeight());
+ if (!srcSize.equals(dstSize)) {
+ throw new IllegalArgumentException("source image size " + srcSize + " is different"
+ + " with " + "destination image size " + dstSize);
+ }
+
+ Plane[] srcPlanes = src.getPlanes();
+ Plane[] dstPlanes = dst.getPlanes();
+ ByteBuffer srcBuffer = null;
+ ByteBuffer dstBuffer = null;
+ for (int i = 0; i < srcPlanes.length; i++) {
+ srcBuffer = srcPlanes[i].getBuffer();
+ int srcPos = srcBuffer.position();
+ srcBuffer.rewind();
+ dstBuffer = dstPlanes[i].getBuffer();
+ dstBuffer.rewind();
+ dstBuffer.put(srcBuffer);
+ srcBuffer.position(srcPos);
+ dstBuffer.rewind();
+ }
+ }
+}
diff --git a/media/java/android/media/ImageWriter.java b/media/java/android/media/ImageWriter.java
new file mode 100644
index 0000000..f805339
--- /dev/null
+++ b/media/java/android/media/ImageWriter.java
@@ -0,0 +1,805 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.NioUtils;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * <p>
+ * The ImageWriter class allows an application to produce Image data into a
+ * {@link android.view.Surface}, and have it be consumed by another component like
+ * {@link android.hardware.camera2.CameraDevice CameraDevice}.
+ * </p>
+ * <p>
+ * Several Android API classes can provide input {@link android.view.Surface
+ * Surface} objects for ImageWriter to produce data into, including
+ * {@link MediaCodec MediaCodec} (encoder),
+ * {@link android.hardware.camera2.CameraDevice CameraDevice} (reprocessing
+ * input), {@link ImageReader}, etc.
+ * </p>
+ * <p>
+ * The input Image data is encapsulated in {@link Image} objects. To produce
+ * Image data into a destination {@link android.view.Surface Surface}, the
+ * application can get an input Image via {@link #dequeueInputImage} then write
+ * Image data into it. Multiple such {@link Image} objects can be dequeued at
+ * the same time and queued back in any order, up to the number specified by the
+ * {@code maxImages} constructor parameter.
+ * </p>
+ * <p>
+ * If the application already has an Image from {@link ImageReader}, the
+ * application can directly queue this Image into ImageWriter (via
+ * {@link #queueInputImage}), potentially with zero buffer copies. For the opaque
+ * Images produced by an opaque ImageReader (created by
+ * {@link ImageReader#newOpaqueInstance}), this is the only way to send Image
+ * data to ImageWriter, as the Image data aren't accessible by the application.
+ * </p>
+ * Once new input Images are queued into an ImageWriter, it's up to the downstream
+ * components (e.g. {@link ImageReader} or
+ * {@link android.hardware.camera2.CameraDevice}) to consume the Images. If the
+ * downstream components cannot consume the Images at least as fast as the
+ * ImageWriter production rate, the {@link #dequeueInputImage} call will eventually
+ * block and the application will have to drop input frames. </p>
+ */
+public class ImageWriter implements AutoCloseable {
+ private final Object mListenerLock = new Object();
+ private ImageListener mListener;
+ private ListenerHandler mListenerHandler;
+ private long mNativeContext;
+
+ // Field below is used by native code, do not access or modify.
+ private int mWriterFormat;
+
+ private final int mMaxImages;
+ // Keep track of the currently dequeued Image.
+ private List<Image> mDequeuedImages = new ArrayList<Image>();
+
+ /**
+ * <p>
+ * Create a new ImageWriter.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+ * {@link Image} objects that can be dequeued from the
+ * {@code ImageWriter} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary.
+ * </p>
+ * <p>
+ * The input Image size and format depend on the Surface that is provided by
+ * the downstream consumer end-point.
+ * </p>
+ *
+ * @param surface The destination Surface this writer produces Image data
+ * into.
+ * @param maxImages The maximum number of Images the user will want to
+ * access simultaneously for producing Image data. This should be
+ * as small as possible to limit memory use. Once maxImages
+ * Images are dequeued by the user, one of them has to be queued
+ * back before a new Image can be dequeued for access via
+ * {@link #dequeueInputImage()}.
+ * @return a new ImageWriter instance.
+ */
+ public static ImageWriter newInstance(Surface surface, int maxImages) {
+ return new ImageWriter(surface, maxImages);
+ }
+
+ /**
+ * @hide
+ */
+ protected ImageWriter(Surface surface, int maxImages) {
+ if (surface == null || maxImages < 1) {
+ throw new IllegalArgumentException("Illegal input argument: surface " + surface
+ + ", maxImages: " + maxImages);
+ }
+
+ mMaxImages = maxImages;
+ // Note that the underlying BufferQueue is working in synchronous mode
+ // to avoid dropping any buffers.
+ mNativeContext = nativeInit(new WeakReference<ImageWriter>(this), surface, maxImages);
+ }
+
+ /**
+ * <p>
+ * Maximum number of Images that can be dequeued from the ImageWriter
+ * simultaneously (for example, with {@link #dequeueInputImage()}).
+ * </p>
+ * <p>
+ * An Image is considered dequeued after it's returned by
+ * {@link #dequeueInputImage()} from ImageWriter, and until the Image is
+ * sent back to ImageWriter via {@link #queueInputImage}, or
+ * {@link Image#close()}.
+ * </p>
+ * <p>
+ * Attempting to dequeue more than {@code maxImages} concurrently will
+ * result in the {@link #dequeueInputImage()} function throwing an
+ * {@link IllegalStateException}.
+ * </p>
+ *
+ * @return Maximum number of Images that can be dequeued from this
+ * ImageWriter.
+ * @see #dequeueInputImage
+ * @see #queueInputImage
+ * @see Image#close
+ */
+ public int getMaxImages() {
+ return mMaxImages;
+ }
+
+ /**
+ * <p>
+ * Dequeue the next available input Image for the application to produce
+ * data into.
+ * </p>
+ * <p>
+ * This method requests a new input Image from ImageWriter. The application
+ * owns this Image after this call. Once the application fills the Image
+ * data, it is expected to return this Image back to ImageWriter for
+ * downstream consumer components (e.g.
+ * {@link android.hardware.camera2.CameraDevice}) to consume. The Image can
+ * be returned to ImageWriter via {@link #queueInputImage} or
+ * {@link Image#close()}.
+ * </p>
+ * <p>
+ * This call will block if all available input images have been queued by
+ * the application and the downstream consumer has not yet consumed any.
+ * When an Image is consumed by the downstream consumer and released, an
+ * {@link ImageListener#onInputImageReleased} callback will be fired, which
+ * indicates that there is one input Image available. For non-opaque formats
+ * ({@link ImageWriter#getFormat()} != {@link ImageFormat#PRIVATE}), it is
+ * recommended to dequeue the next Image only after this callback is fired,
+ * in the steady state.
+ * </p>
+ * <p>
+ * If the ImageWriter is opaque ({@link ImageWriter#getFormat()} ==
+ * {@link ImageFormat#PRIVATE}), the image buffer is inaccessible to
+ * the application, and calling this method will result in an
+ * {@link IllegalStateException}. Instead, the application should acquire
+ * opaque images from some other component (e.g. an opaque
+ * {@link ImageReader}), and queue them directly to this ImageWriter via the
+ * {@link ImageWriter#queueInputImage queueInputImage()} method.
+ * </p>
+ *
+ * @return The next available input Image from this ImageWriter.
+ * @throws IllegalStateException if {@code maxImages} Images are currently
+ * dequeued, or the ImageWriter is opaque.
+ * @see #queueInputImage
+ * @see Image#close
+ */
+ public Image dequeueInputImage() {
+ if (mWriterFormat == ImageFormat.PRIVATE) {
+ throw new IllegalStateException(
+ "Opaque ImageWriter doesn't support this operation since opaque images are"
+ + " inaccessible to the application!");
+ }
+
+ if (mDequeuedImages.size() >= mMaxImages) {
+ throw new IllegalStateException("Already dequeued max number of Images " + mMaxImages);
+ }
+ WriterSurfaceImage newImage = new WriterSurfaceImage(this);
+ nativeDequeueInputImage(mNativeContext, newImage);
+ mDequeuedImages.add(newImage);
+ newImage.mIsImageValid = true;
+ return newImage;
+ }
+
+ /**
+ * <p>
+ * Queue an input {@link Image} back to ImageWriter for the downstream
+ * consumer to access.
+ * </p>
+ * <p>
+ * The input {@link Image} could be from ImageReader (acquired via
+ * {@link ImageReader#acquireNextImage} or
+ * {@link ImageReader#acquireLatestImage}), or from this ImageWriter
+ * (acquired via {@link #dequeueInputImage}). In the former case, the Image
+ * data will be moved to this ImageWriter. Note that the Image properties
+ * (size, format, strides, etc.) must be the same as the properties of the
+ * images dequeued from this ImageWriter, or this method will throw an
+ * {@link IllegalArgumentException}. In the latter case, the application has
+ * filled the input image with data. This method then passes the filled
+ * buffer to the downstream consumer. In both cases, it's up to the caller
+ * to ensure that the Image timestamp (in nanoseconds) is correctly set, as
+ * the downstream component may want to use it to indicate the Image data
+ * capture time.
+ * </p>
+ * <p>
+ * After this method is called and the downstream consumer consumes and
+ * releases the Image, an {@link ImageListener#onInputImageReleased
+ * onInputImageReleased()} callback will fire. The application can use this
+ * callback to avoid sending Images faster than the downstream consumer
+ * processing rate in steady state.
+ * </p>
+ * <p>
+ * Passing in an Image from some other component (e.g. an
+ * {@link ImageReader}) requires a free input Image from this ImageWriter as
+ * the destination. In this case, this call will block, as
+ * {@link #dequeueInputImage} does, if there are no free Images available.
+ * To avoid blocking, the application should ensure that there is at least
+ * one free Image available in this ImageWriter before calling this method.
+ * </p>
+ * <p>
+ * After this call, the input Image is no longer valid for further access,
+ * as if the Image is {@link Image#close closed}. Attempting to access the
+ * {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Image.Plane#getBuffer Plane#getBuffer} call will result in an
+ * {@link IllegalStateException}.
+ * </p>
+ *
+ * @param image The Image to be queued back to ImageWriter for future
+ * consumption.
+ * @see #dequeueInputImage()
+ */
+ public void queueInputImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("image shouldn't be null");
+ }
+ boolean ownedByMe = isImageOwnedByMe(image);
+ if (ownedByMe && !(((WriterSurfaceImage) image).mIsImageValid)) {
+ throw new IllegalStateException("Image from ImageWriter is invalid");
+ }
+
+ // For images from other components, need to detach first, then attach.
+ if (!ownedByMe) {
+ if (!(image.getOwner() instanceof ImageReader)) {
+ throw new IllegalArgumentException("Only images from ImageReader can be queued to"
+ + " ImageWriter, other image source is not supported yet!");
+ }
+
+ ImageReader prevOwner = (ImageReader) image.getOwner();
+ // Only do the image attach for opaque images for now. Do the image
+ // copy for other formats. TODO: use attach for other formats to
+ // improve the performance, and fall back to copy when attach/detach
+ // fails. Right now, detach is guaranteed to fail as the buffer is
+ // locked when ImageReader#acquireNextImage is called. See bug 19962027.
+ if (image.isOpaque()) {
+ prevOwner.detachImage(image);
+ attachAndQueueInputImage(image);
+ // This clears the native reference held by the original owner.
+ // When this Image is detached later by this ImageWriter, the
+ // native memory won't be leaked.
+ image.close();
+ return;
+ } else {
+ Image inputImage = dequeueInputImage();
+ inputImage.setTimestamp(image.getTimestamp());
+ inputImage.setCropRect(image.getCropRect());
+ ImageUtils.imageCopy(image, inputImage);
+ image.close();
+ image = inputImage;
+ ownedByMe = true;
+ }
+ }
+
+ Rect crop = image.getCropRect();
+ nativeQueueInputImage(mNativeContext, image, image.getTimestamp(), crop.left, crop.top,
+ crop.right, crop.bottom);
+
+ /**
+ * Only remove and cleanup the Images that are owned by this
+ * ImageWriter. Images detached from other owners are only temporarily
+ * owned by this ImageWriter and will be detached immediately after they
+ * are released by downstream consumers, so there is no need to keep
+ * track of them in mDequeuedImages.
+ */
+ if (ownedByMe) {
+ mDequeuedImages.remove(image);
+ // Do not call close here, as close is essentially cancel image.
+ WriterSurfaceImage wi = (WriterSurfaceImage) image;
+ wi.clearSurfacePlanes();
+ wi.mIsImageValid = false;
+ }
+ }
+
+ /**
+ * Get the ImageWriter format.
+ * <p>
+ * This format may be different than the Image format returned by
+ * {@link Image#getFormat()}. However, if the ImageWriter is opaque (format
+ * == {@link ImageFormat#PRIVATE}) , the images from it will also be opaque.
+ * </p>
+ *
+ * @return The ImageWriter format.
+ */
+ public int getFormat() {
+ return mWriterFormat;
+ }
+
+ /**
+ * ImageWriter callback interface, used to asynchronously notify the
+ * application of various ImageWriter events.
+ */
+ public interface ImageListener {
+ /**
+ * <p>
+ * Callback that is called when an input Image is released back to
+ * ImageWriter after the data consumption.
+ * </p>
+ * <p>
+ * The client can use this callback to be notified that an input Image
+ * has been consumed and released by the downstream consumer. More
+ * specifically, this callback will be fired for below cases:
+ * <li>The application dequeues an input Image via the
+ * {@link ImageWriter#dequeueInputImage dequeueInputImage()} method,
+ * uses it, and then queues it back to this ImageWriter via the
+ * {@link ImageWriter#queueInputImage queueInputImage()} method. After
+ * the downstream consumer uses and releases this image to this
+ * ImageWriter, this callback will be fired. This image will be
+ * available to be dequeued after this callback.</li>
+ * <li>The application obtains an Image from some other component (e.g.
+ * an {@link ImageReader}), uses it, and then queues it to this
+ * ImageWriter via {@link ImageWriter#queueInputImage queueInputImage()}.
+ * After the downstream consumer uses and releases this image to this
+ * ImageWriter, this callback will be fired.</li>
+ * </p>
+ *
+ * @param writer the ImageWriter the callback is associated with.
+ * @see ImageWriter
+ * @see Image
+ */
+ void onInputImageReleased(ImageWriter writer);
+ }
+
+ /**
+ * Register a listener to be invoked when an input Image is returned to the
+ * ImageWriter.
+ *
+ * @param listener The listener that will be run.
+ * @param handler The handler on which the listener should be invoked, or
+ * null if the listener should be invoked on the calling thread's
+ * looper.
+ * @throws IllegalArgumentException If no handler specified and the calling
+ * thread has no looper.
+ */
+ public void setImageListener(ImageListener listener, Handler handler) {
+ synchronized (mListenerLock) {
+ if (listener != null) {
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper == null) {
+ throw new IllegalArgumentException(
+ "handler is null but the current thread is not a looper");
+ }
+ if (mListenerHandler == null || mListenerHandler.getLooper() != looper) {
+ mListenerHandler = new ListenerHandler(looper);
+ }
+ mListener = listener;
+ } else {
+ mListener = null;
+ mListenerHandler = null;
+ }
+ }
+ }
+
+ /**
+ * Free up all the resources associated with this ImageWriter.
+ * <p>
+ * After calling this method, this ImageWriter cannot be used. Calling any
+ * methods on this ImageWriter and Images previously provided by
+ * {@link #dequeueInputImage()} will result in an
+ * {@link IllegalStateException}, and attempting to write into
+ * {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Image.Plane#getBuffer Plane#getBuffer} call will have undefined
+ * behavior.
+ * </p>
+ */
+ @Override
+ public void close() {
+ setImageListener(null, null);
+ for (Image image : mDequeuedImages) {
+ image.close();
+ }
+ mDequeuedImages.clear();
+ nativeClose(mNativeContext);
+ mNativeContext = 0;
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /**
+ * <p>
+ * Attach and queue input Image to this ImageWriter.
+ * </p>
+ * <p>
+ * When an Image is from an opaque source (e.g. an opaque ImageReader
+ * created by {@link ImageReader#newOpaqueInstance}), or the source Image is
+ * so large that copying its data is too expensive, this method can be used
+ * to migrate the source Image into ImageWriter without a data copy, and
+ * then queue it to this ImageWriter. The source Image must be detached from
+ * its previous owner already, or this call will throw an
+ * {@link IllegalStateException}.
+ * </p>
+ * <p>
+ * After this call, the ImageWriter takes ownership of this Image. This
+ * ownership will automatically be removed from this writer after the
+ * consumer releases this Image, that is, after
+ * {@link ImageListener#onInputImageReleased}. The caller is responsible for
+ * closing this Image through {@link Image#close()} to free up the resources
+ * held by this Image.
+ * </p>
+ *
+ * @param image The source Image to be attached and queued into this
+ * ImageWriter for downstream consumer to use.
+ * @throws IllegalStateException if the Image is not detached from its
+ * previous owner, or the Image is already attached to this
+ * ImageWriter, or the source Image is invalid.
+ */
+ private void attachAndQueueInputImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("image shouldn't be null");
+ }
+ if (isImageOwnedByMe(image)) {
+ throw new IllegalArgumentException(
+ "Can not attach an image that is owned ImageWriter already");
+ }
+ /**
+ * Throw ISE if the image is not attachable, which means that it is
+ * either owned by other entity now, or completely non-attachable (some
+ * stand-alone images are not backed by native gralloc buffer, thus not
+ * attachable).
+ */
+ if (!image.isAttachable()) {
+ throw new IllegalStateException("Image was not detached from last owner, or image "
+ + " is not detachable");
+ }
+
+ // TODO: what if attach failed, throw RTE or detach a slot then attach?
+ // need do some cleanup to make sure no orphaned
+ // buffer caused leak.
+ Rect crop = image.getCropRect();
+ nativeAttachAndQueueImage(mNativeContext, image.getNativeContext(), image.getFormat(),
+ image.getTimestamp(), crop.left, crop.top, crop.right, crop.bottom);
+ }
+
+ /**
+ * This custom handler runs asynchronously so callbacks don't get queued
+ * behind UI messages.
+ */
+ private final class ListenerHandler extends Handler {
+ public ListenerHandler(Looper looper) {
+ super(looper, null, true /* async */);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ ImageListener listener;
+ synchronized (mListenerLock) {
+ listener = mListener;
+ }
+ if (listener != null) {
+ listener.onInputImageReleased(ImageWriter.this);
+ }
+ }
+ }
+
+ /**
+ * Called from Native code when an Event happens. This may be called from an
+ * arbitrary Binder thread, so access to the ImageWriter must be
+ * synchronized appropriately.
+ */
+ private static void postEventFromNative(Object selfRef) {
+ @SuppressWarnings("unchecked")
+ WeakReference<ImageWriter> weakSelf = (WeakReference<ImageWriter>) selfRef;
+ final ImageWriter iw = weakSelf.get();
+ if (iw == null) {
+ return;
+ }
+
+ final Handler handler;
+ synchronized (iw.mListenerLock) {
+ handler = iw.mListenerHandler;
+ }
+ if (handler != null) {
+ handler.sendEmptyMessage(0);
+ }
+ }
+
+ /**
+ * <p>
+ * Abort the Images that were dequeued from this ImageWriter, and return
+ * them to this writer for reuse.
+ * </p>
+ * <p>
+ * This method is used for the cases where the application dequeued the
+ * Image, may have filled the data, but does not want the downstream
+ * component to consume it. The Image will be returned to this ImageWriter
+ * for reuse after this call, and the ImageWriter will immediately have an
+ * Image available to be dequeued. This aborted Image will be invisible to
+ * the downstream consumer, as if nothing happened.
+ * </p>
+ *
+ * @param image The Image to be aborted.
+ * @see #dequeueInputImage()
+ * @see Image#close()
+ */
+ private void abortImage(Image image) {
+ if (image == null) {
+ throw new IllegalArgumentException("image shouldn't be null");
+ }
+
+ if (!mDequeuedImages.contains(image)) {
+ throw new IllegalStateException("It is illegal to abort some image that is not"
+ + " dequeued yet");
+ }
+
+ WriterSurfaceImage wi = (WriterSurfaceImage) image;
+
+ if (!wi.mIsImageValid) {
+ throw new IllegalStateException("Image is invalid");
+ }
+
+ /**
+ * We only need to abort Images that are owned and dequeued by ImageWriter.
+ * For attached Images, no need to abort, as there are only two cases:
+ * attached + queued successfully, and attach failed. Neither of the
+ * cases need abort.
+ */
+ cancelImage(mNativeContext, image);
+ mDequeuedImages.remove(image);
+ wi.clearSurfacePlanes();
+ wi.mIsImageValid = false;
+ }
+
+ private boolean isImageOwnedByMe(Image image) {
+ if (!(image instanceof WriterSurfaceImage)) {
+ return false;
+ }
+ WriterSurfaceImage wi = (WriterSurfaceImage) image;
+ if (wi.getOwner() != this) {
+ return false;
+ }
+
+ return true;
+ }
+
+ private static class WriterSurfaceImage extends android.media.Image {
+ private ImageWriter mOwner;
+ // This field is used by native code, do not access or modify.
+ private long mNativeBuffer;
+ private int mNativeFenceFd = -1;
+ private SurfacePlane[] mPlanes;
+ private int mHeight = -1;
+ private int mWidth = -1;
+ private int mFormat = -1;
+ // When this default timestamp is used, timestamp for the input Image
+ // will be generated automatically when queueInputBuffer is called.
+ private final long DEFAULT_TIMESTAMP = Long.MIN_VALUE;
+ private long mTimestamp = DEFAULT_TIMESTAMP;
+
+ public WriterSurfaceImage(ImageWriter writer) {
+ mOwner = writer;
+ }
+
+ @Override
+ public int getFormat() {
+ throwISEIfImageIsInvalid();
+
+ if (mFormat == -1) {
+ mFormat = nativeGetFormat();
+ }
+ return mFormat;
+ }
+
+ @Override
+ public int getWidth() {
+ throwISEIfImageIsInvalid();
+
+ if (mWidth == -1) {
+ mWidth = nativeGetWidth();
+ }
+
+ return mWidth;
+ }
+
+ @Override
+ public int getHeight() {
+ throwISEIfImageIsInvalid();
+
+ if (mHeight == -1) {
+ mHeight = nativeGetHeight();
+ }
+
+ return mHeight;
+ }
+
+ @Override
+ public long getTimestamp() {
+ throwISEIfImageIsInvalid();
+
+ return mTimestamp;
+ }
+
+ @Override
+ public void setTimestamp(long timestamp) {
+ throwISEIfImageIsInvalid();
+
+ mTimestamp = timestamp;
+ }
+
+ @Override
+ public boolean isOpaque() {
+ throwISEIfImageIsInvalid();
+
+ return getFormat() == ImageFormat.PRIVATE;
+ }
+
+ @Override
+ public Plane[] getPlanes() {
+ throwISEIfImageIsInvalid();
+
+ if (mPlanes == null) {
+ int numPlanes = ImageUtils.getNumPlanesForFormat(getFormat());
+ mPlanes = nativeCreatePlanes(numPlanes, getOwner().getFormat());
+ }
+
+ return mPlanes.clone();
+ }
+
+ @Override
+ boolean isAttachable() {
+ throwISEIfImageIsInvalid();
+ // Don't allow Image to be detached from ImageWriter for now, as no
+ // detach API is exposed.
+ return false;
+ }
+
+ @Override
+ ImageWriter getOwner() {
+ throwISEIfImageIsInvalid();
+
+ return mOwner;
+ }
+
+ @Override
+ long getNativeContext() {
+ throwISEIfImageIsInvalid();
+
+ return mNativeBuffer;
+ }
+
+ @Override
+ public void close() {
+ if (mIsImageValid) {
+ getOwner().abortImage(this);
+ }
+ }
+
+ @Override
+ protected final void finalize() throws Throwable {
+ try {
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private void clearSurfacePlanes() {
+ if (mIsImageValid) {
+ for (int i = 0; i < mPlanes.length; i++) {
+ if (mPlanes[i] != null) {
+ mPlanes[i].clearBuffer();
+ mPlanes[i] = null;
+ }
+ }
+ }
+ }
+
+ private class SurfacePlane extends android.media.Image.Plane {
+ private ByteBuffer mBuffer;
+ final private int mPixelStride;
+ final private int mRowStride;
+
+ // SurfacePlane instance is created by native code when a new
+ // WriterSurfaceImage is created
+ private SurfacePlane(int rowStride, int pixelStride, ByteBuffer buffer) {
+ mRowStride = rowStride;
+ mPixelStride = pixelStride;
+ mBuffer = buffer;
+ /**
+ * Set the byteBuffer order according to host endianness (native
+ * order), otherwise, the byteBuffer order defaults to
+ * ByteOrder.BIG_ENDIAN.
+ */
+ mBuffer.order(ByteOrder.nativeOrder());
+ }
+
+ @Override
+ public int getRowStride() {
+ throwISEIfImageIsInvalid();
+ return mRowStride;
+ }
+
+ @Override
+ public int getPixelStride() {
+ throwISEIfImageIsInvalid();
+ return mPixelStride;
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ throwISEIfImageIsInvalid();
+ return mBuffer;
+ }
+
+ private void clearBuffer() {
+ // Need null check first, as the getBuffer() may not be called
+ // before an Image is closed.
+ if (mBuffer == null) {
+ return;
+ }
+
+ if (mBuffer.isDirect()) {
+ NioUtils.freeDirectBuffer(mBuffer);
+ }
+ mBuffer = null;
+ }
+
+ }
+
+ // this will create the SurfacePlane object and fill the information
+ private synchronized native SurfacePlane[] nativeCreatePlanes(int numPlanes, int writerFmt);
+
+ private synchronized native int nativeGetWidth();
+
+ private synchronized native int nativeGetHeight();
+
+ private synchronized native int nativeGetFormat();
+ }
+
+ // Native implemented ImageWriter methods.
+ private synchronized native long nativeInit(Object weakSelf, Surface surface, int maxImgs);
+
+ private synchronized native void nativeClose(long nativeCtx);
+
+ private synchronized native void nativeDequeueInputImage(long nativeCtx, Image wi);
+
+ private synchronized native void nativeQueueInputImage(long nativeCtx, Image image,
+ long timestampNs, int left, int top, int right, int bottom);
+
+ private synchronized native int nativeAttachAndQueueImage(long nativeCtx,
+ long imageNativeBuffer, int imageFormat, long timestampNs, int left,
+ int top, int right, int bottom);
+
+ private synchronized native void cancelImage(long nativeCtx, Image image);
+
+ /**
+ * We use a class initializer to allow the native code to cache some field
+ * offsets.
+ */
+ private static native void nativeClassInit();
+
+ static {
+ System.loadLibrary("media_jni");
+ nativeClassInit();
+ }
+}
diff --git a/media/java/android/media/MediaCodec.java b/media/java/android/media/MediaCodec.java
index a7f33fa..d22cfda 100644
--- a/media/java/android/media/MediaCodec.java
+++ b/media/java/android/media/MediaCodec.java
@@ -16,6 +16,9 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.media.Image;
@@ -30,6 +33,8 @@ import android.os.Message;
import android.view.Surface;
import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ReadOnlyBufferException;
import java.util.Arrays;
@@ -253,7 +258,7 @@ final public class MediaCodec {
* {@link #BUFFER_FLAG_END_OF_STREAM}.
*/
public void set(
- int newOffset, int newSize, long newTimeUs, int newFlags) {
+ int newOffset, int newSize, long newTimeUs, @BufferFlag int newFlags) {
offset = newOffset;
size = newSize;
presentationTimeUs = newTimeUs;
@@ -293,7 +298,16 @@ final public class MediaCodec {
* be an empty buffer, whose sole purpose is to carry the end-of-stream
* marker.
*/
+ @BufferFlag
public int flags;
+
+ /** @hide */
+ @NonNull
+ public BufferInfo dup() {
+ BufferInfo copy = new BufferInfo();
+ copy.set(offset, size, presentationTimeUs, flags);
+ return copy;
+ }
};
// The follow flag constants MUST stay in sync with their equivalents
@@ -325,11 +339,37 @@ final public class MediaCodec {
*/
public static final int BUFFER_FLAG_END_OF_STREAM = 4;
+ /** @hide */
+ @IntDef(
+ flag = true,
+ value = {
+ BUFFER_FLAG_SYNC_FRAME,
+ BUFFER_FLAG_KEY_FRAME,
+ BUFFER_FLAG_CODEC_CONFIG,
+ BUFFER_FLAG_END_OF_STREAM,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface BufferFlag {}
+
+ private static class FrameRenderedInfo {
+ public long mPresentationTimeUs;
+ public long mNanoTime;
+ public FrameRenderedInfo(long presentationTimeUs, long nanoTime) {
+ mPresentationTimeUs = presentationTimeUs;
+ mNanoTime = nanoTime;
+ }
+ }
+
private EventHandler mEventHandler;
+ private EventHandler mOnFrameRenderedHandler;
+ private EventHandler mCallbackHandler;
private Callback mCallback;
+ private OnFrameRenderedListener mOnFrameRenderedListener;
+ private Object mListenerLock = new Object();
private static final int EVENT_CALLBACK = 1;
private static final int EVENT_SET_CALLBACK = 2;
+ private static final int EVENT_FRAME_RENDERED = 3;
private static final int CB_INPUT_AVAILABLE = 1;
private static final int CB_OUTPUT_AVAILABLE = 2;
@@ -339,13 +379,13 @@ final public class MediaCodec {
private class EventHandler extends Handler {
private MediaCodec mCodec;
- public EventHandler(MediaCodec codec, Looper looper) {
+ public EventHandler(@NonNull MediaCodec codec, @NonNull Looper looper) {
super(looper);
mCodec = codec;
}
@Override
- public void handleMessage(Message msg) {
+ public void handleMessage(@NonNull Message msg) {
switch (msg.what) {
case EVENT_CALLBACK:
{
@@ -357,6 +397,15 @@ final public class MediaCodec {
mCallback = (MediaCodec.Callback) msg.obj;
break;
}
+ case EVENT_FRAME_RENDERED:
+ synchronized (mListenerLock) {
+ FrameRenderedInfo info = (FrameRenderedInfo)msg.obj;
+ if (mOnFrameRenderedListener != null) {
+ mOnFrameRenderedListener.onFrameRendered(
+ mCodec, info.mPresentationTimeUs, info.mNanoTime);
+ }
+ break;
+ }
default:
{
break;
@@ -364,7 +413,7 @@ final public class MediaCodec {
}
}
- private void handleCallback(Message msg) {
+ private void handleCallback(@NonNull Message msg) {
if (mCallback == null) {
return;
}
@@ -413,6 +462,8 @@ final public class MediaCodec {
}
}
+ private boolean mHasSurface = false;
+
/**
* Instantiate a decoder supporting input data of the given mime type.
*
@@ -438,7 +489,8 @@ final public class MediaCodec {
* @throws IllegalArgumentException if type is not a valid mime type.
* @throws NullPointerException if type is null.
*/
- public static MediaCodec createDecoderByType(String type)
+ @NonNull
+ public static MediaCodec createDecoderByType(@NonNull String type)
throws IOException {
return new MediaCodec(type, true /* nameIsType */, false /* encoder */);
}
@@ -450,7 +502,8 @@ final public class MediaCodec {
* @throws IllegalArgumentException if type is not a valid mime type.
* @throws NullPointerException if type is null.
*/
- public static MediaCodec createEncoderByType(String type)
+ @NonNull
+ public static MediaCodec createEncoderByType(@NonNull String type)
throws IOException {
return new MediaCodec(type, true /* nameIsType */, true /* encoder */);
}
@@ -464,14 +517,15 @@ final public class MediaCodec {
* @throws IllegalArgumentException if name is not valid.
* @throws NullPointerException if name is null.
*/
- public static MediaCodec createByCodecName(String name)
+ @NonNull
+ public static MediaCodec createByCodecName(@NonNull String name)
throws IOException {
return new MediaCodec(
name, false /* nameIsType */, false /* unused */);
}
private MediaCodec(
- String name, boolean nameIsType, boolean encoder) {
+ @NonNull String name, boolean nameIsType, boolean encoder) {
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
@@ -480,6 +534,9 @@ final public class MediaCodec {
} else {
mEventHandler = null;
}
+ mCallbackHandler = mEventHandler;
+ mOnFrameRenderedHandler = mEventHandler;
+
mBufferLock = new Object();
native_setup(name, nameIsType, encoder);
@@ -524,15 +581,26 @@ final public class MediaCodec {
*/
public static final int CONFIGURE_FLAG_ENCODE = 1;
+ /** @hide */
+ @IntDef(flag = true, value = { CONFIGURE_FLAG_ENCODE })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ConfigureFlag {}
+
/**
* Configures a component.
*
* @param format The format of the input data (decoder) or the desired
- * format of the output data (encoder).
+ * format of the output data (encoder). Passing {@code null}
+ * as {@code format} is equivalent to passing an
+ * {@link MediaFormat#MediaFormat an empty mediaformat}.
* @param surface Specify a surface on which to render the output of this
- * decoder.
+ * decoder. Pass {@code null} as {@code surface} if the
+ * codec does not generate raw video output (e.g. not a video
+ * decoder) and/or if you want to configure the codec for
+ * {@link ByteBuffer} output.
* @param crypto Specify a crypto object to facilitate secure decryption
- * of the media data.
+ * of the media data. Pass {@code null} as {@code crypto} for
+ * non-secure codecs.
* @param flags Specify {@link #CONFIGURE_FLAG_ENCODE} to configure the
* component as an encoder.
* @throws IllegalArgumentException if the surface has been released (or is invalid),
@@ -540,16 +608,18 @@ final public class MediaCodec {
* or the flags are not set properly
* (e.g. missing {@link #CONFIGURE_FLAG_ENCODE} for an encoder).
* @throws IllegalStateException if not in the Initialized state.
+ * @throws CryptoException upon DRM error.
+ * @throws CodecException upon codec error.
*/
public void configure(
- MediaFormat format,
- Surface surface, MediaCrypto crypto, int flags) {
- Map<String, Object> formatMap = format.getMap();
-
+ @Nullable MediaFormat format,
+ @Nullable Surface surface, @Nullable MediaCrypto crypto,
+ @ConfigureFlag int flags) {
String[] keys = null;
Object[] values = null;
if (format != null) {
+ Map<String, Object> formatMap = format.getMap();
keys = new String[formatMap.size()];
values = new Object[formatMap.size()];
@@ -573,14 +643,71 @@ final public class MediaCodec {
}
}
+ mHasSurface = surface != null;
+
native_configure(keys, values, surface, crypto, flags);
}
- private native final void native_setCallback(Callback cb);
+ /**
+ * Dynamically sets the output surface of a codec.
+ * <p>
+ * This can only be used if the codec was configured with an output surface. The
+ * new output surface should have a compatible usage type to the original output surface.
+ * E.g. codecs may not support switching from a SurfaceTexture (GPU readable) output
+ * to ImageReader (software readable) output.
+ * @param surface the output surface to use. It must not be {@code null}.
+ * @throws IllegalStateException if the codec does not support setting the output
+ * surface in the current state.
+ * @throws IllegalArgumentException if the new surface is not of a suitable type for the codec.
+ */
+ public void setSurface(@NonNull Surface surface) {
+ if (!mHasSurface) {
+ throw new IllegalStateException("codec was not configured for an output surface");
+ }
+
+ // TODO implement this
+ throw new IllegalArgumentException("codec does not support this surface");
+ }
+
+ /**
+ * Create a persistent input surface that can be used with codecs that normally have an input
+ * surface, such as video encoders. A persistent input can be reused by subsequent
+ * {@link MediaCodec} or {@link MediaRecorder} instances, but can only be used by at
+ * most one codec or recorder instance concurrently.
+ * <p>
+ * The application is responsible for calling release() on the Surface when done.
+ *
+ * @return an input surface that can be used with {@link #usePersistentInputSurface}.
+ */
+ @NonNull
+ public static Surface createPersistentInputSurface() {
+ // TODO implement this
+ return new PersistentSurface();
+ }
+
+ static class PersistentSurface extends Surface {
+ PersistentSurface() {}
+ };
+
+ /**
+ * Configures the codec (e.g. encoder) to use a persistent input surface in place of input
+ * buffers. This may only be called after {@link #configure} and before {@link #start}, in
+ * lieu of {@link #createInputSurface}.
+ * @param surface a persistent input surface created by {@link #createPersistentInputSurface}
+ * @throws IllegalStateException if not in the Configured state or does not require an input
+ * surface.
+ * @throws IllegalArgumentException if the surface was not created by
+ * {@link #createPersistentInputSurface}.
+ */
+ public void usePersistentInputSurface(@NonNull Surface surface) {
+ throw new IllegalArgumentException("not implemented");
+ }
+
+ private native final void native_setCallback(@Nullable Callback cb);
private native final void native_configure(
- String[] keys, Object[] values,
- Surface surface, MediaCrypto crypto, int flags);
+ @Nullable String[] keys, @Nullable Object[] values,
+ @Nullable Surface surface, @Nullable MediaCrypto crypto, @ConfigureFlag int flags);
/**
* Requests a Surface to use as the input to an encoder, in place of input buffers. This
@@ -594,6 +721,7 @@ final public class MediaCodec {
* unexpected results.
* @throws IllegalStateException if not in the Configured state.
*/
+ @NonNull
public native final Surface createInputSurface();
/**
@@ -627,9 +755,14 @@ final public class MediaCodec {
native_stop();
freeAllTrackedBuffers();
- if (mEventHandler != null) {
- mEventHandler.removeMessages(EVENT_CALLBACK);
- mEventHandler.removeMessages(EVENT_SET_CALLBACK);
+ synchronized (mListenerLock) {
+ if (mCallbackHandler != null) {
+ mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+ mCallbackHandler.removeMessages(EVENT_CALLBACK);
+ }
+ if (mOnFrameRenderedHandler != null) {
+ mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+ }
}
}
@@ -667,9 +800,10 @@ final public class MediaCodec {
* Thrown when an internal codec error occurs.
*/
public final static class CodecException extends IllegalStateException {
- CodecException(int errorCode, int actionCode, String detailMessage) {
+ CodecException(int errorCode, int actionCode, @Nullable String detailMessage, int reason) {
super(detailMessage);
mErrorCode = errorCode;
+ mReason = reason;
mActionCode = actionCode;
// TODO get this from codec
@@ -697,6 +831,16 @@ final public class MediaCodec {
}
/**
+ * Retrieve the reason associated with a CodecException.
+ * The reason could be one of {@link #REASON_HARDWARE} or {@link #REASON_RECLAIMED}.
+ *
+ */
+ @ReasonCode
+ public int getReason() {
+ return mReason;
+ }
+
+ /**
* Retrieve the error code associated with a CodecException.
* This is opaque diagnostic information and may depend on
* hardware or API level.
@@ -713,16 +857,38 @@ final public class MediaCodec {
* since this string will not be localized or generally
* comprehensible to end-users.
*/
- public String getDiagnosticInfo() {
+ public @NonNull String getDiagnosticInfo() {
return mDiagnosticInfo;
}
+ /**
+ * This indicates the exception is caused by the hardware.
+ */
+ public static final int REASON_HARDWARE = 0;
+
+ /**
+ * This indicates the exception is because the resource manager reclaimed
+ * the media resource used by the codec.
+ * <p>
+ * With this exception, the codec must be released, as it has moved to terminal state.
+ */
+ public static final int REASON_RECLAIMED = 1;
+
+ /** @hide */
+ @IntDef({
+ REASON_HARDWARE,
+ REASON_RECLAIMED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ReasonCode {}
+
/* Must be in sync with android_media_MediaCodec.cpp */
private final static int ACTION_TRANSIENT = 1;
private final static int ACTION_RECOVERABLE = 2;
private final String mDiagnosticInfo;
private final int mErrorCode;
+ private final int mReason;
private final int mActionCode;
}
@@ -730,7 +896,7 @@ final public class MediaCodec {
* Thrown when a crypto error occurs while queueing a secure input buffer.
*/
public final static class CryptoException extends RuntimeException {
- public CryptoException(int errorCode, String detailMessage) {
+ public CryptoException(int errorCode, @Nullable String detailMessage) {
super(detailMessage);
mErrorCode = errorCode;
}
@@ -763,9 +929,20 @@ final public class MediaCodec {
*/
public static final int ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
+ /** @hide */
+ @IntDef({
+ ERROR_NO_KEY,
+ ERROR_KEY_EXPIRED,
+ ERROR_RESOURCE_BUSY,
+ ERROR_INSUFFICIENT_OUTPUT_PROTECTION,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface CryptoErrorCode {}
+
/**
* Retrieve the error code associated with a CryptoException
*/
+ @CryptoErrorCode
public int getErrorCode() {
return mErrorCode;
}
@@ -859,10 +1036,10 @@ final public class MediaCodec {
public final static class CryptoInfo {
public void set(
int newNumSubSamples,
- int[] newNumBytesOfClearData,
- int[] newNumBytesOfEncryptedData,
- byte[] newKey,
- byte[] newIV,
+ @NonNull int[] newNumBytesOfClearData,
+ @NonNull int[] newNumBytesOfEncryptedData,
+ @NonNull byte[] newKey,
+ @NonNull byte[] newIV,
int newMode) {
numSubSamples = newNumSubSamples;
numBytesOfClearData = newNumBytesOfClearData;
@@ -944,7 +1121,7 @@ final public class MediaCodec {
public final void queueSecureInputBuffer(
int index,
int offset,
- CryptoInfo info,
+ @NonNull CryptoInfo info,
long presentationTimeUs,
int flags) throws CryptoException {
synchronized(mBufferLock) {
@@ -963,7 +1140,7 @@ final public class MediaCodec {
private native final void native_queueSecureInputBuffer(
int index,
int offset,
- CryptoInfo info,
+ @NonNull CryptoInfo info,
long presentationTimeUs,
int flags) throws CryptoException;
@@ -1017,6 +1194,15 @@ final public class MediaCodec {
*/
public static final int INFO_OUTPUT_BUFFERS_CHANGED = -3;
+ /** @hide */
+ @IntDef({
+ INFO_TRY_AGAIN_LATER,
+ INFO_OUTPUT_FORMAT_CHANGED,
+ INFO_OUTPUT_BUFFERS_CHANGED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface OutputBufferInfo {}
+
/**
* Dequeue an output buffer, block at most "timeoutUs" microseconds.
* Returns the index of an output buffer that has been successfully
@@ -1027,21 +1213,25 @@ final public class MediaCodec {
* or codec is configured in asynchronous mode.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @OutputBufferInfo
public final int dequeueOutputBuffer(
- BufferInfo info, long timeoutUs) {
+ @NonNull BufferInfo info, long timeoutUs) {
int res = native_dequeueOutputBuffer(info, timeoutUs);
synchronized(mBufferLock) {
if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
cacheBuffers(false /* input */);
} else if (res >= 0) {
validateOutputByteBuffer(mCachedOutputBuffers, res, info);
+ if (mHasSurface) {
+ mDequeuedOutputInfos.put(res, info.dup());
+ }
}
}
return res;
}
private native final int native_dequeueOutputBuffer(
- BufferInfo info, long timeoutUs);
+ @NonNull BufferInfo info, long timeoutUs);
/**
* If you are done with a buffer, use this call to return the buffer to
@@ -1061,13 +1251,34 @@ final public class MediaCodec {
* @throws MediaCodec.CodecException upon codec error.
*/
public final void releaseOutputBuffer(int index, boolean render) {
+ BufferInfo info = null;
synchronized(mBufferLock) {
invalidateByteBuffer(mCachedOutputBuffers, index);
mDequeuedOutputBuffers.remove(index);
+ if (mHasSurface) {
+ info = mDequeuedOutputInfos.remove(index);
+ }
}
+ // TODO
+ // until codec and libgui supports callback, assume frame is rendered within 50 ms
+ postRenderedCallback(render, info, 50 /* delayMs */);
releaseOutputBuffer(index, render, false /* updatePTS */, 0 /* dummy */);
}
+ private void postRenderedCallback(boolean render, @Nullable BufferInfo info, long delayMs) {
+ if (render && info != null) {
+ synchronized (mListenerLock) {
+ if (mOnFrameRenderedListener != null) {
+ FrameRenderedInfo obj = new FrameRenderedInfo(
+ info.presentationTimeUs, System.nanoTime() + delayMs * 1000000);
+ Message msg = mOnFrameRenderedHandler.obtainMessage(
+ EVENT_FRAME_RENDERED, obj);
+ mOnFrameRenderedHandler.sendMessageDelayed(msg, delayMs);
+ }
+ }
+ }
+ }
+
/**
* If you are done with a buffer, use this call to update its surface timestamp
* and return it to the codec to render it on the output surface. If you
@@ -1118,10 +1329,20 @@ final public class MediaCodec {
* @throws MediaCodec.CodecException upon codec error.
*/
public final void releaseOutputBuffer(int index, long renderTimestampNs) {
+ BufferInfo info = null;
synchronized(mBufferLock) {
invalidateByteBuffer(mCachedOutputBuffers, index);
mDequeuedOutputBuffers.remove(index);
+ if (mHasSurface) {
+ info = mDequeuedOutputInfos.remove(index);
+ }
}
+ // TODO
+ // until codec and libgui supports callback, assume frame is rendered at the
+ // render time or 16 ms from now, whichever is later.
+ postRenderedCallback(
+ true /* render */, info,
+ Math.max(renderTimestampNs - System.nanoTime(), 16666666) / 1000000);
releaseOutputBuffer(
index, true /* render */, true /* updatePTS */, renderTimestampNs);
}
@@ -1150,6 +1371,7 @@ final public class MediaCodec {
* Configured state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public final MediaFormat getOutputFormat() {
return new MediaFormat(getFormatNative(false /* input */));
}
@@ -1164,6 +1386,7 @@ final public class MediaCodec {
* Configured state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public final MediaFormat getInputFormat() {
return new MediaFormat(getFormatNative(true /* input */));
}
@@ -1177,12 +1400,15 @@ final public class MediaCodec {
* @return the format for the output buffer, or null if the index
* is not a dequeued output buffer.
*/
+ @NonNull
public final MediaFormat getOutputFormat(int index) {
return new MediaFormat(getOutputFormatNative(index));
}
+ @NonNull
private native final Map<String, Object> getFormatNative(boolean input);
+ @NonNull
private native final Map<String, Object> getOutputFormatNative(int index);
// used to track dequeued buffers
@@ -1204,12 +1430,12 @@ final public class MediaCodec {
}
}
- public void setImage(Image image) {
+ public void setImage(@Nullable Image image) {
free();
mImage = image;
}
- public void setByteBuffer(ByteBuffer buffer) {
+ public void setByteBuffer(@Nullable ByteBuffer buffer) {
free();
mByteBuffer = buffer;
}
@@ -1226,7 +1452,7 @@ final public class MediaCodec {
}
}
- public void put(int index, ByteBuffer newBuffer) {
+ public void put(int index, @Nullable ByteBuffer newBuffer) {
CodecBuffer buffer = mMap.get(index);
if (buffer == null) { // likely
buffer = new CodecBuffer();
@@ -1235,7 +1461,7 @@ final public class MediaCodec {
buffer.setByteBuffer(newBuffer);
}
- public void put(int index, Image newImage) {
+ public void put(int index, @Nullable Image newImage) {
CodecBuffer buffer = mMap.get(index);
if (buffer == null) { // likely
buffer = new CodecBuffer();
@@ -1256,10 +1482,12 @@ final public class MediaCodec {
private ByteBuffer[] mCachedOutputBuffers;
private final BufferMap mDequeuedInputBuffers = new BufferMap();
private final BufferMap mDequeuedOutputBuffers = new BufferMap();
+ private final Map<Integer, BufferInfo> mDequeuedOutputInfos =
+ new HashMap<Integer, BufferInfo>();
final private Object mBufferLock;
private final void invalidateByteBuffer(
- ByteBuffer[] buffers, int index) {
+ @Nullable ByteBuffer[] buffers, int index) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
if (buffer != null) {
@@ -1269,7 +1497,7 @@ final public class MediaCodec {
}
private final void validateInputByteBuffer(
- ByteBuffer[] buffers, int index) {
+ @Nullable ByteBuffer[] buffers, int index) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
if (buffer != null) {
@@ -1280,7 +1508,7 @@ final public class MediaCodec {
}
private final void revalidateByteBuffer(
- ByteBuffer[] buffers, int index) {
+ @Nullable ByteBuffer[] buffers, int index) {
synchronized(mBufferLock) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
@@ -1292,7 +1520,7 @@ final public class MediaCodec {
}
private final void validateOutputByteBuffer(
- ByteBuffer[] buffers, int index, BufferInfo info) {
+ @Nullable ByteBuffer[] buffers, int index, @NonNull BufferInfo info) {
if (buffers != null && index >= 0 && index < buffers.length) {
ByteBuffer buffer = buffers[index];
if (buffer != null) {
@@ -1302,7 +1530,7 @@ final public class MediaCodec {
}
}
- private final void invalidateByteBuffers(ByteBuffer[] buffers) {
+ private final void invalidateByteBuffers(@Nullable ByteBuffer[] buffers) {
if (buffers != null) {
for (ByteBuffer buffer: buffers) {
if (buffer != null) {
@@ -1312,14 +1540,14 @@ final public class MediaCodec {
}
}
- private final void freeByteBuffer(ByteBuffer buffer) {
+ private final void freeByteBuffer(@Nullable ByteBuffer buffer) {
if (buffer != null /* && buffer.isDirect() */) {
// all of our ByteBuffers are direct
java.nio.NioUtils.freeDirectBuffer(buffer);
}
}
- private final void freeByteBuffers(ByteBuffer[] buffers) {
+ private final void freeByteBuffers(@Nullable ByteBuffer[] buffers) {
if (buffers != null) {
for (ByteBuffer buffer: buffers) {
freeByteBuffer(buffer);
@@ -1362,13 +1590,14 @@ final public class MediaCodec {
* @deprecated Use the new {@link #getInputBuffer} method instead
* each time an input buffer is dequeued.
*
- * <b>Note:</b>As of API 21, dequeued input buffers are
+ * <b>Note:</b> As of API 21, dequeued input buffers are
* automatically {@link java.nio.Buffer#clear cleared}.
*
* @throws IllegalStateException if not in the Executing state,
* or codec is configured in asynchronous mode.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public ByteBuffer[] getInputBuffers() {
if (mCachedInputBuffers == null) {
throw new IllegalStateException();
@@ -1389,7 +1618,7 @@ final public class MediaCodec {
* each time an output buffer is dequeued. This method is not
* supported if codec is configured in asynchronous mode.
*
- * <b>Note:</b>As of API 21, the position and limit of output
+ * <b>Note:</b> As of API 21, the position and limit of output
* buffers that are dequeued will be set to the valid data
* range.
*
@@ -1397,6 +1626,7 @@ final public class MediaCodec {
* or codec is configured in asynchronous mode.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @NonNull
public ByteBuffer[] getOutputBuffers() {
if (mCachedOutputBuffers == null) {
throw new IllegalStateException();
@@ -1423,6 +1653,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public ByteBuffer getInputBuffer(int index) {
ByteBuffer newBuffer = getBuffer(true /* input */, index);
synchronized(mBufferLock) {
@@ -1451,6 +1682,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public Image getInputImage(int index) {
Image newImage = getImage(true /* input */, index);
synchronized(mBufferLock) {
@@ -1479,6 +1711,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public ByteBuffer getOutputBuffer(int index) {
ByteBuffer newBuffer = getBuffer(false /* input */, index);
synchronized(mBufferLock) {
@@ -1506,6 +1739,7 @@ final public class MediaCodec {
* @throws IllegalStateException if not in the Executing state.
* @throws MediaCodec.CodecException upon codec error.
*/
+ @Nullable
public Image getOutputImage(int index) {
Image newImage = getImage(false /* input */, index);
synchronized(mBufferLock) {
@@ -1526,19 +1760,28 @@ final public class MediaCodec {
*/
public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
+ /** @hide */
+ @IntDef({
+ VIDEO_SCALING_MODE_SCALE_TO_FIT,
+ VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface VideoScalingMode {}
+
/**
* If a surface has been specified in a previous call to {@link #configure}
* specifies the scaling mode to use. The default is "scale to fit".
* @throws IllegalArgumentException if mode is not recognized.
* @throws IllegalStateException if in the Uninitialized state.
*/
- public native final void setVideoScalingMode(int mode);
+ public native final void setVideoScalingMode(@VideoScalingMode int mode);
/**
* Get the component name. If the codec was created by createDecoderByType
* or createEncoderByType, what component is chosen is not known beforehand.
* @throws IllegalStateException if in the Uninitialized state.
*/
+ @NonNull
public native final String getName();
/**
@@ -1566,9 +1809,12 @@ final public class MediaCodec {
/**
* Communicate additional parameter changes to the component instance.
+ * <b>Note:</b> Some of these parameter changes may silently fail to apply.
+ *
+ * @param params The bundle of parameters to set.
* @throws IllegalStateException if in the Uninitialized state.
*/
- public final void setParameters(Bundle params) {
+ public final void setParameters(@Nullable Bundle params) {
if (params == null) {
return;
}
@@ -1600,22 +1846,124 @@ final public class MediaCodec {
* {@code flush}, you must call {@link #start} to "resume" receiving input buffers,
* even if an input surface was created.
*
- * @param cb The callback that will run.
+ * @param cb The callback that will run. Use {@code null} to clear a previously
+ * set callback (before {@link #configure configure} is called and run
+ * in synchronous mode).
+ * @param handler Callbacks will happen on the handler's thread. If {@code null},
+ * callbacks are done on the default thread (the caller's thread or the
+ * main thread.)
*/
- public void setCallback(/* MediaCodec. */ Callback cb) {
- if (mEventHandler != null) {
- // set java callback on handler
- Message msg = mEventHandler.obtainMessage(EVENT_SET_CALLBACK, 0, 0, cb);
- mEventHandler.sendMessage(msg);
+ public void setCallback(@Nullable /* MediaCodec. */ Callback cb, @Nullable Handler handler) {
+ if (cb != null) {
+ synchronized (mListenerLock) {
+ EventHandler newHandler = getEventHandlerOn(handler, mCallbackHandler);
+ // NOTE: there are no callbacks on the handler at this time, but check anyways
+ // even if we were to extend this to be callable dynamically, it must
+ // be called when codec is flushed, so no messages are pending.
+ if (newHandler != mCallbackHandler) {
+ mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+ mCallbackHandler.removeMessages(EVENT_CALLBACK);
+ mCallbackHandler = newHandler;
+ }
+ }
+ } else if (mCallbackHandler != null) {
+ mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+ mCallbackHandler.removeMessages(EVENT_CALLBACK);
+ }
+
+ if (mCallbackHandler != null) {
+ // set java callback on main handler
+ Message msg = mCallbackHandler.obtainMessage(EVENT_SET_CALLBACK, 0, 0, cb);
+ mCallbackHandler.sendMessage(msg);
// set native handler here, don't post to handler because
- // it may cause the callback to be delayed and set in a wrong state,
- // and MediaCodec is already doing it on looper.
+ // it may cause the callback to be delayed and set in a wrong state.
+ // Note that native codec may start sending events to the callback
+ // handler after this returns.
native_setCallback(cb);
}
}
/**
+ * Sets an asynchronous callback for actionable MediaCodec events on the default
+ * looper.
+ * <p>
+ * Same as {@link #setCallback(Callback, Handler)} with handler set to null.
+ * @param cb The callback that will run. Use {@code null} to clear a previously
+ * set callback (before {@link #configure configure} is called and run
+ * in synchronous mode).
+ * @see #setCallback(Callback, Handler)
+ */
+ public void setCallback(@Nullable /* MediaCodec. */ Callback cb) {
+ setCallback(cb, null /* handler */);
+ }
+
+ /**
+ * Listener to be called when an output frame has rendered on the output surface
+ *
+ * @see MediaCodec#setOnFrameRenderedListener
+ */
+ public interface OnFrameRenderedListener {
+
+ /**
+ * Called when an output frame has rendered on the output surface.
+ *
+ * @param codec the MediaCodec instance
+ * @param presentationTimeUs the presentation time (media time) of the frame rendered.
+ * This is usually the same as specified in {@link #queueInputBuffer}; however,
+ * some codecs may alter the media time by applying some time-based transformation,
+ * such as frame rate conversion. In that case, presentation time corresponds
+ * to the actual output frame rendered.
+ * @param nanoTime The system time when the frame was rendered.
+ *
+ * @see System#nanoTime
+ */
+ public void onFrameRendered(
+ @NonNull MediaCodec codec, long presentationTimeUs, long nanoTime);
+ }
+
+ /**
+ * Register a callback to be invoked when an output frame is rendered on the output surface.
+ * <p>
+ * This method can be called in any codec state, but will only have an effect in the
+ * Executing state for codecs that render buffers to the output surface.
+ *
+ * @param listener the callback that will be run
+ * @param handler the callback will be run on the handler's thread. If {@code null},
+ * the callback will be run on the default thread, which is the looper
+ * from which the codec was created, or a new thread if there was none.
+ */
+ public void setOnFrameRenderedListener(
+ @Nullable OnFrameRenderedListener listener, @Nullable Handler handler) {
+ synchronized (mListenerLock) {
+ mOnFrameRenderedListener = listener;
+ if (listener != null) {
+ EventHandler newHandler = getEventHandlerOn(handler, mOnFrameRenderedHandler);
+ if (newHandler != mOnFrameRenderedHandler) {
+ mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+ }
+ mOnFrameRenderedHandler = newHandler;
+ } else if (mOnFrameRenderedHandler != null) {
+ mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+ }
+ }
+ }
+
+ private EventHandler getEventHandlerOn(
+ @Nullable Handler handler, @NonNull EventHandler lastHandler) {
+ if (handler == null) {
+ return mEventHandler;
+ } else {
+ Looper looper = handler.getLooper();
+ if (lastHandler.getLooper() == looper) {
+ return lastHandler;
+ } else {
+ return new EventHandler(this, looper);
+ }
+ }
+ }
+
+ /**
* MediaCodec callback interface. Used to notify the user asynchronously
* of various MediaCodec events.
*/
@@ -1626,7 +1974,7 @@ final public class MediaCodec {
* @param codec The MediaCodec object.
* @param index The index of the available input buffer.
*/
- public abstract void onInputBufferAvailable(MediaCodec codec, int index);
+ public abstract void onInputBufferAvailable(@NonNull MediaCodec codec, int index);
/**
* Called when an output buffer becomes available.
@@ -1635,7 +1983,8 @@ final public class MediaCodec {
* @param index The index of the available output buffer.
* @param info Info regarding the available output buffer {@link MediaCodec.BufferInfo}.
*/
- public abstract void onOutputBufferAvailable(MediaCodec codec, int index, BufferInfo info);
+ public abstract void onOutputBufferAvailable(
+ @NonNull MediaCodec codec, int index, @NonNull BufferInfo info);
/**
* Called when the MediaCodec encountered an error
@@ -1643,7 +1992,7 @@ final public class MediaCodec {
* @param codec The MediaCodec object.
* @param e The {@link MediaCodec.CodecException} object describing the error.
*/
- public abstract void onError(MediaCodec codec, CodecException e);
+ public abstract void onError(@NonNull MediaCodec codec, @NonNull CodecException e);
/**
* Called when the output format has changed
@@ -1651,18 +2000,27 @@ final public class MediaCodec {
* @param codec The MediaCodec object.
* @param format The new output format.
*/
- public abstract void onOutputFormatChanged(MediaCodec codec, MediaFormat format);
+ public abstract void onOutputFormatChanged(
+ @NonNull MediaCodec codec, @NonNull MediaFormat format);
}
private void postEventFromNative(
- int what, int arg1, int arg2, Object obj) {
- if (mEventHandler != null) {
- Message msg = mEventHandler.obtainMessage(what, arg1, arg2, obj);
- mEventHandler.sendMessage(msg);
+ int what, int arg1, int arg2, @Nullable Object obj) {
+ synchronized (mListenerLock) {
+ EventHandler handler = mEventHandler;
+ if (what == EVENT_CALLBACK) {
+ handler = mCallbackHandler;
+ } else if (what == EVENT_FRAME_RENDERED) {
+ handler = mOnFrameRenderedHandler;
+ }
+ if (handler != null) {
+ Message msg = handler.obtainMessage(what, arg1, arg2, obj);
+ handler.sendMessage(msg);
+ }
}
}
- private native final void setParameters(String[] keys, Object[] values);
+ private native final void setParameters(@NonNull String[] keys, @NonNull Object[] values);
/**
* Get the codec info. If the codec was created by createDecoderByType
@@ -1670,20 +2028,24 @@ final public class MediaCodec {
* and thus the caller does not have the MediaCodecInfo.
* @throws IllegalStateException if in the Uninitialized state.
*/
+ @NonNull
public MediaCodecInfo getCodecInfo() {
return MediaCodecList.getInfoFor(getName());
}
+ @NonNull
private native final ByteBuffer[] getBuffers(boolean input);
+ @Nullable
private native final ByteBuffer getBuffer(boolean input, int index);
+ @Nullable
private native final Image getImage(boolean input, int index);
private static native final void native_init();
private native final void native_setup(
- String name, boolean nameIsType, boolean encoder);
+ @NonNull String name, boolean nameIsType, boolean encoder);
private native final void native_finalize();
@@ -1697,7 +2059,6 @@ final public class MediaCodec {
/** @hide */
public static class MediaImage extends Image {
private final boolean mIsReadOnly;
- private boolean mIsValid;
private final int mWidth;
private final int mHeight;
private final int mFormat;
@@ -1710,35 +2071,42 @@ final public class MediaCodec {
private final static int TYPE_YUV = 1;
+ @Override
public int getFormat() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mFormat;
}
+ @Override
public int getHeight() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mHeight;
}
+ @Override
public int getWidth() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mWidth;
}
+ @Override
public long getTimestamp() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mTimestamp;
}
+ @Override
+ @NonNull
public Plane[] getPlanes() {
- checkValid();
+ throwISEIfImageIsInvalid();
return Arrays.copyOf(mPlanes, mPlanes.length);
}
+ @Override
public void close() {
- if (mIsValid) {
+ if (mIsImageValid) {
java.nio.NioUtils.freeDirectBuffer(mBuffer);
- mIsValid = false;
+ mIsImageValid = false;
}
}
@@ -1748,20 +2116,16 @@ final public class MediaCodec {
* The crop rectangle specifies the region of valid pixels in the image,
* using coordinates in the largest-resolution plane.
*/
- public void setCropRect(Rect cropRect) {
+ @Override
+ public void setCropRect(@Nullable Rect cropRect) {
if (mIsReadOnly) {
throw new ReadOnlyBufferException();
}
super.setCropRect(cropRect);
}
- private void checkValid() {
- if (!mIsValid) {
- throw new IllegalStateException("Image is already released");
- }
- }
- private int readInt(ByteBuffer buffer, boolean asLong) {
+ private int readInt(@NonNull ByteBuffer buffer, boolean asLong) {
if (asLong) {
return (int)buffer.getLong();
} else {
@@ -1770,11 +2134,11 @@ final public class MediaCodec {
}
public MediaImage(
- ByteBuffer buffer, ByteBuffer info, boolean readOnly,
- long timestamp, int xOffset, int yOffset, Rect cropRect) {
+ @NonNull ByteBuffer buffer, @NonNull ByteBuffer info, boolean readOnly,
+ long timestamp, int xOffset, int yOffset, @Nullable Rect cropRect) {
mFormat = ImageFormat.YUV_420_888;
mTimestamp = timestamp;
- mIsValid = true;
+ mIsImageValid = true;
mIsReadOnly = buffer.isReadOnly();
mBuffer = buffer.duplicate();
@@ -1837,7 +2201,7 @@ final public class MediaCodec {
}
private class MediaPlane extends Plane {
- public MediaPlane(ByteBuffer buffer, int rowInc, int colInc) {
+ public MediaPlane(@NonNull ByteBuffer buffer, int rowInc, int colInc) {
mData = buffer;
mRowInc = rowInc;
mColInc = colInc;
@@ -1845,19 +2209,20 @@ final public class MediaCodec {
@Override
public int getRowStride() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mRowInc;
}
@Override
public int getPixelStride() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mColInc;
}
@Override
+ @NonNull
public ByteBuffer getBuffer() {
- checkValid();
+ throwISEIfImageIsInvalid();
return mData;
}
diff --git a/media/java/android/media/MediaCodecInfo.java b/media/java/android/media/MediaCodecInfo.java
index ebf73da..974c9af 100644
--- a/media/java/android/media/MediaCodecInfo.java
+++ b/media/java/android/media/MediaCodecInfo.java
@@ -124,6 +124,8 @@ public final class MediaCodecInfo {
private static final Range<Integer> SIZE_RANGE = Range.create(1, 32768);
private static final Range<Integer> FRAME_RATE_RANGE = Range.create(0, 960);
private static final Range<Integer> BITRATE_RANGE = Range.create(0, 500000000);
+ private static final int DEFAULT_MAX_SUPPORTED_INSTANCES = 32;
+ private static final int MAX_SUPPORTED_INSTANCES_LIMIT = 256;
// found stuff that is not supported by framework (=> this should not happen)
private static final int ERROR_UNRECOGNIZED = (1 << 0);
@@ -147,6 +149,7 @@ public final class MediaCodecInfo {
// CLASSIFICATION
private String mMime;
+ private int mMaxSupportedInstances;
// LEGACY FIELDS
@@ -157,57 +160,274 @@ public final class MediaCodecInfo {
public CodecProfileLevel[] profileLevels; // NOTE this array is modifiable by user
// from OMX_COLOR_FORMATTYPE
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_FormatMonochrome = 1;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format8bitRGB332 = 2;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format12bitRGB444 = 3;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format16bitARGB4444 = 4;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format16bitARGB1555 = 5;
+
+ /**
+ * 16 bits per pixel RGB color format, with 5-bit red & blue and 6-bit green component.
+ * <p>
+ * Using 16-bit little-endian representation, colors stored as Red 15:11, Green 10:5, Blue 4:0.
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | BLUE | GREEN | RED |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 4 5 7 0 2 3 7
+ * bit
+ * </pre>
+ *
+ * This format corresponds to {@link android.graphics.PixelFormat#RGB_565} and
+ * {@link android.graphics.ImageFormat#RGB_565}.
+ */
public static final int COLOR_Format16bitRGB565 = 6;
+ /** @deprecated Use {@link #COLOR_Format16bitRGB565}. */
public static final int COLOR_Format16bitBGR565 = 7;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format18bitRGB666 = 8;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format18bitARGB1665 = 9;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format19bitARGB1666 = 10;
+
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888} or {@link #COLOR_FormatRGBFlexible}. */
public static final int COLOR_Format24bitRGB888 = 11;
+
+ /**
+ * 24 bits per pixel RGB color format, with 8-bit red, green & blue components.
+ * <p>
+ * Using 24-bit little-endian representation, colors stored as Red 7:0, Green 15:8, Blue 23:16.
+ * <pre>
+ * byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----->
+ * +-----------------+-----------------+-----------------+
+ * | RED | GREEN | BLUE |
+ * +-----------------+-----------------+-----------------+
+ * </pre>
+ *
+ * This format corresponds to {@link android.graphics.PixelFormat#RGB_888}, and can also be
+ * represented as a flexible format by {@link #COLOR_FormatRGBFlexible}.
+ */
public static final int COLOR_Format24bitBGR888 = 12;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24bitARGB1887 = 13;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format25bitARGB1888 = 14;
+
+ /**
+ * @deprecated Use {@link #COLOR_Format32bitABGR8888} or {@link #COLOR_FormatRGBAFlexible}.
+ */
public static final int COLOR_Format32bitBGRA8888 = 15;
+ /**
+ * @deprecated Use {@link #COLOR_Format32bitABGR8888} or {@link #COLOR_FormatRGBAFlexible}.
+ */
public static final int COLOR_Format32bitARGB8888 = 16;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV411Planar = 17;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV411PackedPlanar = 18;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420Planar = 19;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420PackedPlanar = 20;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420SemiPlanar = 21;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422Planar = 22;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422PackedPlanar = 23;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422SemiPlanar = 24;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYCbYCr = 25;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYCrYCb = 26;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatCbYCrY = 27;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatCrYCbY = 28;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV444Flexible}. */
public static final int COLOR_FormatYUV444Interleaved = 29;
+
+ /**
+ * SMIA 8-bit Bayer format.
+ * Each byte represents the top 8-bits of a 10-bit signal.
+ */
public static final int COLOR_FormatRawBayer8bit = 30;
+ /**
+ * SMIA 10-bit Bayer format.
+ */
public static final int COLOR_FormatRawBayer10bit = 31;
+
+ /**
+ * SMIA 8-bit compressed Bayer format.
+ * Each byte represents a sample from the 10-bit signal that is compressed into 8-bits
+ * using DPCM/PCM compression, as defined by the SMIA Functional Specification.
+ */
public static final int COLOR_FormatRawBayer8bitcompressed = 32;
+
+ /** @deprecated Use {@link #COLOR_FormatL8}. */
public static final int COLOR_FormatL2 = 33;
+ /** @deprecated Use {@link #COLOR_FormatL8}. */
public static final int COLOR_FormatL4 = 34;
+
+ /**
+ * 8 bits per pixel Y color format.
+ * <p>
+ * Each byte contains a single pixel.
+ * This format corresponds to {@link android.graphics.PixelFormat#L_8}.
+ */
public static final int COLOR_FormatL8 = 35;
+
+ /**
+ * 16 bits per pixel, little-endian Y color format.
+ * <p>
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | Y |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 7 0 7
+ * bit
+ * </pre>
+ */
public static final int COLOR_FormatL16 = 36;
+ /** @deprecated Use {@link #COLOR_FormatL16}. */
public static final int COLOR_FormatL24 = 37;
+
+ /**
+ * 32 bits per pixel, little-endian Y color format.
+ * <p>
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+-----------------+-----------------+-----------------+
+ * | Y |
+ * +-----------------+-----------------+-----------------+-----------------+
+ * 0 7 0 7 0 7 0 7
+ * bit
+ * </pre>
+ *
+ * @deprecated Use {@link #COLOR_FormatL16}.
+ */
public static final int COLOR_FormatL32 = 38;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_FormatYUV420PackedSemiPlanar = 39;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
public static final int COLOR_FormatYUV422PackedSemiPlanar = 40;
+
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
public static final int COLOR_Format18BitBGR666 = 41;
+
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24BitARGB6666 = 42;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24BitABGR6666 = 43;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
// COLOR_FormatSurface indicates that the data will be a GraphicBuffer metadata reference.
// In OMX this is called OMX_COLOR_FormatAndroidOpaque.
public static final int COLOR_FormatSurface = 0x7F000789;
- public static final int COLOR_Format32BitRGBA8888 = 0x7F00A000;
- // This corresponds to YUV_420_888 format
+
+ /**
+ * 32 bits per pixel RGBA color format, with 8-bit red, green, blue, and alpha components.
+ * <p>
+ * Using 32-bit little-endian representation, colors stored as Red 7:0, Green 15:8,
+ * Blue 23:16, and Alpha 31:24.
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+-----------------+-----------------+-----------------+
+ * | RED | GREEN | BLUE | ALPHA |
+ * +-----------------+-----------------+-----------------+-----------------+
+ * </pre>
+ *
+ * This corresponds to {@link android.graphics.PixelFormat#RGBA_8888}.
+ */
+ public static final int COLOR_Format32bitABGR8888 = 0x7F00A000;
+
+ /**
+ * Flexible 12 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are subsampled by 2 both horizontally and vertically.
+ * Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_420_888},
+ * and can represent the {@link #COLOR_FormatYUV411Planar},
+ * {@link #COLOR_FormatYUV411PackedPlanar}, {@link #COLOR_FormatYUV420Planar},
+ * {@link #COLOR_FormatYUV420PackedPlanar}, {@link #COLOR_FormatYUV420SemiPlanar}
+ * and {@link #COLOR_FormatYUV420PackedSemiPlanar} formats.
+ *
+ * @see Image#getFormat
+ */
public static final int COLOR_FormatYUV420Flexible = 0x7F420888;
+
+ /**
+ * Flexible 16 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are horizontally subsampled by 2. Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_422_888},
+ * and can represent the {@link #COLOR_FormatYCbYCr}, {@link #COLOR_FormatYCrYCb},
+ * {@link #COLOR_FormatCbYCrY}, {@link #COLOR_FormatCrYCbY},
+ * {@link #COLOR_FormatYUV422Planar}, {@link #COLOR_FormatYUV422PackedPlanar},
+ * {@link #COLOR_FormatYUV422SemiPlanar} and {@link #COLOR_FormatYUV422PackedSemiPlanar}
+ * formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV422Flexible = 0x7F422888;
+
+ /**
+ * Flexible 24 bits per pixel YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are not subsampled. Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_444_888},
+ * and can represent the {@link #COLOR_FormatYUV444Interleaved} format.
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV444Flexible = 0x7F444888;
+
+ /**
+ * Flexible 24 bits per pixel RGB color format with 8-bit red, green and blue
+ * components.
+ * <p>
+ * Use this format with {@link Image}. This format corresponds to
+ * {@link android.graphics.ImageFormat#FLEX_RGB_888}, and can represent
+ * {@link #COLOR_Format24bitBGR888} and {@link #COLOR_Format24bitRGB888} formats.
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatRGBFlexible = 0x7F36B888;
+
+ /**
+ * Flexible 32 bits per pixel RGBA color format with 8-bit red, green, blue, and alpha
+ * components.
+ * <p>
+ * Use this format with {@link Image}. This format corresponds to
+ * {@link android.graphics.ImageFormat#FLEX_RGBA_8888}, and can represent
+ * {@link #COLOR_Format32bitBGRA8888}, {@link #COLOR_Format32bitABGR8888} and
+ * {@link #COLOR_Format32bitARGB8888} formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatRGBAFlexible = 0x7F36A888;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
/**
@@ -366,6 +586,18 @@ public final class MediaCodecInfo {
return mMime;
}
+ /**
+ * Returns the maximum number of supported concurrent codec instances.
+ * <p>
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ */
+ public int getMaxSupportedInstances() {
+ return mMaxSupportedInstances;
+ }
+
private boolean isAudio() {
return mAudioCaps != null;
}
@@ -467,6 +699,15 @@ public final class MediaCodecInfo {
mEncoderCaps.setDefaultFormat(mDefaultFormat);
}
+ final Map<String, Object> global = MediaCodecList.getGlobalSettings();
+ mMaxSupportedInstances = Utils.parseIntSafely(
+ global.get("max-supported-instances"), DEFAULT_MAX_SUPPORTED_INSTANCES);
+
+ int maxInstances = Utils.parseIntSafely(
+ map.get("max-supported-instances"), mMaxSupportedInstances);
+ mMaxSupportedInstances =
+ Range.create(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances);
+
for (Feature feat: getValidFeatures()) {
String key = MediaFormat.KEY_FEATURE_ + feat.mName;
Integer yesNo = (Integer)map.get(key);
@@ -955,6 +1196,27 @@ public final class MediaCodecInfo {
}
/**
+ * Returns the range of achievable video frame rates for a video size.
+ * May return {@code null}, if the codec did not publish any measurement
+ * data.
+ * <p>
+ * This is a performance estimate, based on full-speed decoding
+ * and encoding measurements of common video sizes supported by the codec.
+ *
+ * @param width the width of the video
+ * @param height the height of the video
+ *
+ * @throws IllegalArgumentException if the video size is not supported.
+ */
+ public Range<Double> getAchievableFrameRatesFor(int width, int height) {
+ if (!supports(width, height, null)) {
+ throw new IllegalArgumentException("unsupported size");
+ }
+ // TODO: get this data from the codec
+ return null;
+ }
+
+ /**
* Returns whether a given video size ({@code width} and
* {@code height}) and {@code frameRate} combination is supported.
*/
diff --git a/media/java/android/media/MediaCodecList.java b/media/java/android/media/MediaCodecList.java
index 7fd0186..f44e048 100644
--- a/media/java/android/media/MediaCodecList.java
+++ b/media/java/android/media/MediaCodecList.java
@@ -21,6 +21,7 @@ import android.util.Log;
import android.media.MediaCodecInfo;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Map;
/**
* Allows you to enumerate available codecs, each specified as a {@link MediaCodecInfo} object,
@@ -61,13 +62,19 @@ final public class MediaCodecList {
return sRegularCodecInfos[index];
}
+ /* package private */ static final Map<String, Object> getGlobalSettings() {
+ return sGlobalSettings;
+ }
+
private static Object sInitLock = new Object();
private static MediaCodecInfo[] sAllCodecInfos;
private static MediaCodecInfo[] sRegularCodecInfos;
+ private static Map<String, Object> sGlobalSettings;
private static final void initCodecList() {
synchronized (sInitLock) {
if (sRegularCodecInfos == null) {
+ sGlobalSettings = native_getGlobalSettings();
int count = native_getCodecCount();
ArrayList<MediaCodecInfo> regulars = new ArrayList<MediaCodecInfo>();
ArrayList<MediaCodecInfo> all = new ArrayList<MediaCodecInfo>();
@@ -112,6 +119,8 @@ final public class MediaCodecList {
/* package private */ static native final MediaCodecInfo.CodecCapabilities
getCodecCapabilities(int index, String type);
+ /* package private */ static native final Map<String, Object> native_getGlobalSettings();
+
/* package private */ static native final int findCodecByName(String codec);
/** @hide */
diff --git a/media/java/android/media/MediaCrypto.java b/media/java/android/media/MediaCrypto.java
index c7c3fc2..474d8b9 100644
--- a/media/java/android/media/MediaCrypto.java
+++ b/media/java/android/media/MediaCrypto.java
@@ -16,6 +16,7 @@
package android.media;
+import android.annotation.NonNull;
import android.media.MediaCryptoException;
import java.util.UUID;
@@ -34,11 +35,12 @@ public final class MediaCrypto {
* this device.
* @param uuid The UUID of the crypto scheme.
*/
- public static final boolean isCryptoSchemeSupported(UUID uuid) {
+ public static final boolean isCryptoSchemeSupported(@NonNull UUID uuid) {
return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid));
}
- private static final byte[] getByteArrayFromUUID(UUID uuid) {
+ @NonNull
+ private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
long msb = uuid.getMostSignificantBits();
long lsb = uuid.getLeastSignificantBits();
@@ -51,7 +53,7 @@ public final class MediaCrypto {
return uuidBytes;
}
- private static final native boolean isCryptoSchemeSupportedNative(byte[] uuid);
+ private static final native boolean isCryptoSchemeSupportedNative(@NonNull byte[] uuid);
/**
* Instantiate a MediaCrypto object using opaque, crypto scheme specific
@@ -59,7 +61,7 @@ public final class MediaCrypto {
* @param uuid The UUID of the crypto scheme.
* @param initData Opaque initialization data specific to the crypto scheme.
*/
- public MediaCrypto(UUID uuid, byte[] initData) throws MediaCryptoException {
+ public MediaCrypto(@NonNull UUID uuid, @NonNull byte[] initData) throws MediaCryptoException {
native_setup(getByteArrayFromUUID(uuid), initData);
}
@@ -68,7 +70,21 @@ public final class MediaCrypto {
* to decode data of the given mime type.
* @param mime The mime type of the media data
*/
- public final native boolean requiresSecureDecoderComponent(String mime);
+ public final native boolean requiresSecureDecoderComponent(@NonNull String mime);
+
+ /**
+ * Associate a MediaDrm session with this MediaCrypto instance. The
+ * MediaDrm session is used to securely load decryption keys for a
+ * crypto scheme. The crypto keys loaded through the MediaDrm session
+ * may be selected for use during the decryption operation performed
+ * by {@link android.media.MediaCodec#queueSecureInputBuffer} by specifying
+ * their key ids in the {@link android.media.MediaCodec.CryptoInfo#key} field.
+ * @param sessionId the MediaDrm sessionId to associate with this
+ * MediaCrypto instance
+ * @throws MediaCryptoException on failure to set the sessionId
+ */
+ public final native void setMediaDrmSession(@NonNull byte[] sessionId)
+ throws MediaCryptoException;
@Override
protected void finalize() {
@@ -78,7 +94,7 @@ public final class MediaCrypto {
public native final void release();
private static native final void native_init();
- private native final void native_setup(byte[] uuid, byte[] initData)
+ private native final void native_setup(@NonNull byte[] uuid, @NonNull byte[] initData)
throws MediaCryptoException;
private native final void native_finalize();
diff --git a/media/java/android/media/MediaCryptoException.java b/media/java/android/media/MediaCryptoException.java
index 44c5222..32ddf47 100644
--- a/media/java/android/media/MediaCryptoException.java
+++ b/media/java/android/media/MediaCryptoException.java
@@ -16,12 +16,14 @@
package android.media;
+import android.annotation.Nullable;
+
/**
- * Exception thrown if MediaCrypto object could not be instantiated for
- * whatever reason.
+ * Exception thrown if MediaCrypto object could not be instantiated or
+ * if unable to perform an operation on the MediaCrypto object.
*/
public final class MediaCryptoException extends Exception {
- public MediaCryptoException(String detailMessage) {
+ public MediaCryptoException(@Nullable String detailMessage) {
super(detailMessage);
}
}
diff --git a/media/java/android/media/MediaDataSource.java b/media/java/android/media/MediaDataSource.java
new file mode 100644
index 0000000..246c0ef
--- /dev/null
+++ b/media/java/android/media/MediaDataSource.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media;
+
+import java.io.Closeable;
+
+/**
+ * For supplying media data to the framework. Implement this if your app has
+ * special requirements for the way media data is obtained.
+ *
+ * <p class="note">Methods of this interface may be called on multiple different
+ * threads. There will be a thread synchronization point between each call to ensure that
+ * modifications to the state of your MediaDataSource are visible to future calls. This means
+ * you don't need to do your own synchronization unless you're modifying the
+ * MediaDataSource from another thread while it's being used by the framework.</p>
+ */
+public interface MediaDataSource extends Closeable {
+ /**
+ * Called to request data from the given position.
+ *
+ * Implementations should write up to {@code size} bytes into
+ * {@code buffer}, and return the number of bytes written.
+ *
+ * Return {@code 0} to indicate that {@code position} is at, or beyond, the
+ * end of the source.
+ *
+ * Return {@code -1} to indicate that a fatal error occurred. The failed
+ * read will not be retried, so transient errors should be handled
+ * internally.
+ *
+ * Throwing an exception from this method will have the same effect as
+ * returning {@code -1}.
+ *
+ * @param position the position in the data source to read from.
+ * @param buffer the buffer to read the data into.
+ * @param size the number of bytes to read.
+ * @return the number of bytes read, or -1 if there was an error.
+ */
+ public int readAt(long position, byte[] buffer, int size);
+
+ /**
+ * Called to get the size of the data source.
+ *
+ * @return the size of data source in bytes, or -1 if the size is unknown.
+ */
+ public long getSize();
+}
diff --git a/media/java/android/media/MediaDescription.java b/media/java/android/media/MediaDescription.java
index ddbffc2..afc3ca7 100644
--- a/media/java/android/media/MediaDescription.java
+++ b/media/java/android/media/MediaDescription.java
@@ -41,9 +41,13 @@ public class MediaDescription implements Parcelable {
* Extras for opaque use by apps/system.
*/
private final Bundle mExtras;
+ /**
+ * A Uri to identify this content.
+ */
+ private final Uri mMediaUri;
private MediaDescription(String mediaId, CharSequence title, CharSequence subtitle,
- CharSequence description, Bitmap icon, Uri iconUri, Bundle extras) {
+ CharSequence description, Bitmap icon, Uri iconUri, Bundle extras, Uri mediaUri) {
mMediaId = mediaId;
mTitle = title;
mSubtitle = subtitle;
@@ -51,6 +55,7 @@ public class MediaDescription implements Parcelable {
mIcon = icon;
mIconUri = iconUri;
mExtras = extras;
+ mMediaUri = mediaUri;
}
private MediaDescription(Parcel in) {
@@ -61,6 +66,7 @@ public class MediaDescription implements Parcelable {
mIcon = in.readParcelable(null);
mIconUri = in.readParcelable(null);
mExtras = in.readBundle();
+ mMediaUri = in.readParcelable(null);
}
/**
@@ -125,6 +131,15 @@ public class MediaDescription implements Parcelable {
return mExtras;
}
+ /**
+ * Returns a Uri representing this content or null.
+ *
+ * @return A media Uri or null.
+ */
+ public @Nullable Uri getMediaUri() {
+ return mMediaUri;
+ }
+
@Override
public int describeContents() {
return 0;
@@ -139,6 +154,7 @@ public class MediaDescription implements Parcelable {
dest.writeParcelable(mIcon, flags);
dest.writeParcelable(mIconUri, flags);
dest.writeBundle(mExtras);
+ dest.writeParcelable(mMediaUri, flags);
}
@Override
@@ -170,6 +186,7 @@ public class MediaDescription implements Parcelable {
private Bitmap mIcon;
private Uri mIconUri;
private Bundle mExtras;
+ private Uri mMediaUri;
/**
* Creates an initially empty builder.
@@ -257,9 +274,20 @@ public class MediaDescription implements Parcelable {
return this;
}
+ /**
+ * Sets the media uri.
+ *
+ * @param mediaUri The content's {@link Uri} for the item or null.
+ * @return this
+ */
+ public Builder setMediaUri(@Nullable Uri mediaUri) {
+ mMediaUri = mediaUri;
+ return this;
+ }
+
public MediaDescription build() {
return new MediaDescription(mMediaId, mTitle, mSubtitle, mDescription, mIcon, mIconUri,
- mExtras);
+ mExtras, mMediaUri);
}
}
}
diff --git a/media/java/android/media/MediaDrm.java b/media/java/android/media/MediaDrm.java
index 6b37a34..acff301 100644
--- a/media/java/android/media/MediaDrm.java
+++ b/media/java/android/media/MediaDrm.java
@@ -16,10 +16,17 @@
package android.media;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
-import java.util.UUID;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.UUID;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.StringDef;
import android.annotation.SystemApi;
import android.os.Handler;
import android.os.Looper;
@@ -82,6 +89,10 @@ import android.util.Log;
* encrypted content, the samples returned from the extractor remain encrypted, they
* are only decrypted when the samples are delivered to the decoder.
* <p>
+ * MediaDrm methods throw {@link java.lang.IllegalStateException}
+ * when a method is called on a MediaDrm object that is in an invalid or inoperable
+ * state. This is typically due to incorrect application API usage, but may also
+ * be due to an unrecoverable failure in the DRM plugin or security hardware.
* <a name="Callbacks"></a>
* <h3>Callbacks</h3>
* <p>Applications should register for informational events in order
@@ -94,12 +105,16 @@ import android.util.Log;
*/
public final class MediaDrm {
- private final static String TAG = "MediaDrm";
+ private static final String TAG = "MediaDrm";
private static final String PERMISSION = android.Manifest.permission.ACCESS_DRM_CERTIFICATES;
private EventHandler mEventHandler;
+ private EventHandler mOnKeysChangeEventHandler;
+ private EventHandler mOnExpirationUpdateEventHandler;
private OnEventListener mOnEventListener;
+ private OnKeysChangeListener mOnKeysChangeListener;
+ private OnExpirationUpdateListener mOnExpirationUpdateListener;
private long mNativeContext;
@@ -117,12 +132,20 @@ public final class MediaDrm {
*/
public static final int CERTIFICATE_TYPE_X509 = 1;
+ /** @hide */
+ @IntDef({
+ CERTIFICATE_TYPE_NONE,
+ CERTIFICATE_TYPE_X509,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface CertificateType {}
+
/**
* Query if the given scheme identified by its UUID is supported on
* this device.
* @param uuid The UUID of the crypto scheme.
*/
- public static final boolean isCryptoSchemeSupported(UUID uuid) {
+ public static final boolean isCryptoSchemeSupported(@NonNull UUID uuid) {
return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid), null);
}
@@ -134,11 +157,12 @@ public final class MediaDrm {
* @param mimeType The MIME type of the media container, e.g. "video/mp4"
* or "video/webm"
*/
- public static final boolean isCryptoSchemeSupported(UUID uuid, String mimeType) {
+ public static final boolean isCryptoSchemeSupported(
+ @NonNull UUID uuid, @NonNull String mimeType) {
return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid), mimeType);
}
- private static final byte[] getByteArrayFromUUID(UUID uuid) {
+ private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
long msb = uuid.getMostSignificantBits();
long lsb = uuid.getLeastSignificantBits();
@@ -151,8 +175,8 @@ public final class MediaDrm {
return uuidBytes;
}
- private static final native boolean isCryptoSchemeSupportedNative(byte[] uuid,
- String mimeType);
+ private static final native boolean isCryptoSchemeSupportedNative(
+ @NonNull byte[] uuid, @Nullable String mimeType);
/**
* Instantiate a MediaDrm object
@@ -162,7 +186,7 @@ public final class MediaDrm {
* @throws UnsupportedSchemeException if the device does not support the
* specified scheme UUID
*/
- public MediaDrm(UUID uuid) throws UnsupportedSchemeException {
+ public MediaDrm(@NonNull UUID uuid) throws UnsupportedSchemeException {
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
@@ -191,7 +215,7 @@ public final class MediaDrm {
/**
* @hide
*/
- public MediaDrmStateException(int errorCode, String detailMessage) {
+ public MediaDrmStateException(int errorCode, @Nullable String detailMessage) {
super(detailMessage);
mErrorCode = errorCode;
@@ -217,17 +241,176 @@ public final class MediaDrm {
* since this string will not be localized or generally comprehensible
* to end-users.
*/
+ @NonNull
public String getDiagnosticInfo() {
return mDiagnosticInfo;
}
}
/**
+ * Register a callback to be invoked when a session expiration update
+ * occurs. The app's OnExpirationUpdateListener will be notified
+ * when the expiration time of the keys in the session have changed.
+ * @param listener the callback that will be run, or {@code null} to unregister the
+ * previously registered callback.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} if the listener should be invoked on the calling thread's looper.
+ */
+ public void setOnExpirationUpdateListener(
+ @Nullable OnExpirationUpdateListener listener, @Nullable Handler handler) {
+ if (listener != null) {
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper != null) {
+ if (mEventHandler == null || mEventHandler.getLooper() != looper) {
+ mEventHandler = new EventHandler(this, looper);
+ }
+ }
+ }
+ mOnExpirationUpdateListener = listener;
+ }
+
+ /**
+ * Interface definition for a callback to be invoked when a drm session
+ * expiration update occurs
+ */
+ public interface OnExpirationUpdateListener
+ {
+ /**
+ * Called when a session expiration update occurs, to inform the app
+ * about the change in expiration time
+ *
+ * @param md the MediaDrm object on which the event occurred
+ * @param sessionId the DRM session ID on which the event occurred
+ * @param expirationTime the new expiration time for the keys in the session.
+ * The time is in milliseconds, relative to the Unix epoch.
+ */
+ void onExpirationUpdate(
+ @NonNull MediaDrm md, @NonNull byte[] sessionId, long expirationTime);
+ }
+
+ /**
+ * Register a callback to be invoked when the state of keys in a session
+ * changes, e.g. when a license update occurs or when a license expires.
+ *
+ * @param listener the callback that will be run when key status changes, or
+ * {@code null} to unregister the previously registered callback.
+ * @param handler the handler on which the listener should be invoked, or
+     * {@code null} if the listener should be invoked on the calling thread's looper.
+ */
+ public void setOnKeysChangeListener(
+ @Nullable OnKeysChangeListener listener, @Nullable Handler handler) {
+ if (listener != null) {
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper != null) {
+ if (mEventHandler == null || mEventHandler.getLooper() != looper) {
+ mEventHandler = new EventHandler(this, looper);
+ }
+ }
+ }
+ mOnKeysChangeListener = listener;
+ }
+
+ /**
+ * Interface definition for a callback to be invoked when the keys in a drm
+ * session change states.
+ */
+ public interface OnKeysChangeListener
+ {
+ /**
+ * Called when the keys in a session change status, such as when the license
+ * is renewed or expires.
+ *
+ * @param md the MediaDrm object on which the event occurred
+ * @param sessionId the DRM session ID on which the event occurred
+ * @param keyInformation a list of {@link MediaDrm.KeyStatus}
+ * instances indicating the status for each key in the session
+ * @param hasNewUsableKey indicates if a key has been added that is usable,
+ * which may trigger an attempt to resume playback on the media stream
+ * if it is currently blocked waiting for a key.
+ */
+ void onKeysChange(
+ @NonNull MediaDrm md, @NonNull byte[] sessionId,
+ @NonNull List<KeyStatus> keyInformation,
+ boolean hasNewUsableKey);
+ }
+
+ /**
+ * The key is currently usable to decrypt media data
+ */
+ public static final int KEY_STATUS_USABLE = 0;
+
+ /**
+ * The key is no longer usable to decrypt media data because its
+ * expiration time has passed.
+ */
+ public static final int KEY_STATUS_EXPIRED = 1;
+
+ /**
+ * The key is not currently usable to decrypt media data because its
+ * output requirements cannot currently be met.
+ */
+ public static final int KEY_STATUS_OUTPUT_NOT_ALLOWED = 2;
+
+ /**
+ * The status of the key is not yet known and is being determined.
+ * The status will be updated with the actual status when it has
+ * been determined.
+ */
+ public static final int KEY_STATUS_PENDING = 3;
+
+ /**
+ * The key is not currently usable to decrypt media data because of an
+ * internal error in processing unrelated to input parameters. This error
+ * is not actionable by an app.
+ */
+ public static final int KEY_STATUS_INTERNAL_ERROR = 4;
+
+ /** @hide */
+ @IntDef({
+ KEY_STATUS_USABLE,
+ KEY_STATUS_EXPIRED,
+ KEY_STATUS_OUTPUT_NOT_ALLOWED,
+ KEY_STATUS_PENDING,
+ KEY_STATUS_INTERNAL_ERROR,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface KeyStatusCode {}
+
+ /**
+ * Defines the status of a key.
+ * A KeyStatus for each key in a session is provided to the
+ * {@link OnKeysChangeListener#onKeysChange}
+ * listener.
+ */
+ public static final class KeyStatus {
+ private final byte[] mKeyId;
+ private final int mStatusCode;
+
+ KeyStatus(@NonNull byte[] keyId, @KeyStatusCode int statusCode) {
+ mKeyId = keyId;
+ mStatusCode = statusCode;
+ }
+
+ /**
+ * Returns the status code for the key
+ */
+ @KeyStatusCode
+ public int getStatusCode() { return mStatusCode; }
+
+ /**
+ * Returns the id for the key
+ */
+ @NonNull
+ public byte[] getKeyId() { return mKeyId; }
+ }
+
+ /**
* Register a callback to be invoked when an event occurs
*
- * @param listener the callback that will be run
+ * @param listener the callback that will be run. Use {@code null} to
+ * stop receiving event callbacks.
*/
- public void setOnEventListener(OnEventListener listener)
+ public void setOnEventListener(@Nullable OnEventListener listener)
{
mOnEventListener = listener;
}
@@ -242,12 +425,16 @@ public final class MediaDrm {
* Called when an event occurs that requires the app to be notified
*
* @param md the MediaDrm object on which the event occurred
- * @param sessionId the DRM session ID on which the event occurred
+ * @param sessionId the DRM session ID on which the event occurred,
+ * or {@code null} if there is no session ID associated with the event.
* @param event indicates the event type
* @param extra an secondary error code
* @param data optional byte array of data that may be associated with the event
*/
- void onEvent(MediaDrm md, byte[] sessionId, int event, int extra, byte[] data);
+ void onEvent(
+ @NonNull MediaDrm md, @Nullable byte[] sessionId,
+ @DrmEvent int event, int extra,
+ @Nullable byte[] data);
}
/**
@@ -284,19 +471,32 @@ public final class MediaDrm {
*/
public static final int EVENT_SESSION_RECLAIMED = 5;
+ /** @hide */
+ @IntDef({
+ EVENT_PROVISION_REQUIRED,
+ EVENT_KEY_REQUIRED,
+ EVENT_KEY_EXPIRED,
+ EVENT_VENDOR_DEFINED,
+ EVENT_SESSION_RECLAIMED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface DrmEvent {}
+
private static final int DRM_EVENT = 200;
+ private static final int EXPIRATION_UPDATE = 201;
+ private static final int KEYS_CHANGE = 202;
private class EventHandler extends Handler
{
private MediaDrm mMediaDrm;
- public EventHandler(MediaDrm md, Looper looper) {
+ public EventHandler(@NonNull MediaDrm md, @NonNull Looper looper) {
super(looper);
mMediaDrm = md;
}
@Override
- public void handleMessage(Message msg) {
+ public void handleMessage(@NonNull Message msg) {
if (mMediaDrm.mNativeContext == 0) {
Log.w(TAG, "MediaDrm went away with unhandled events");
return;
@@ -304,8 +504,6 @@ public final class MediaDrm {
switch(msg.what) {
case DRM_EVENT:
- Log.i(TAG, "Drm event (" + msg.arg1 + "," + msg.arg2 + ")");
-
if (mOnEventListener != null) {
if (msg.obj != null && msg.obj instanceof Parcel) {
Parcel parcel = (Parcel)msg.obj;
@@ -317,11 +515,46 @@ public final class MediaDrm {
if (data.length == 0) {
data = null;
}
+
+ Log.i(TAG, "Drm event (" + msg.arg1 + "," + msg.arg2 + ")");
mOnEventListener.onEvent(mMediaDrm, sessionId, msg.arg1, msg.arg2, data);
}
}
return;
+ case KEYS_CHANGE:
+ if (mOnKeysChangeListener != null) {
+ if (msg.obj != null && msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel)msg.obj;
+ byte[] sessionId = parcel.createByteArray();
+ if (sessionId.length > 0) {
+ List<KeyStatus> keyStatusList = keyStatusListFromParcel(parcel);
+ boolean hasNewUsableKey = (parcel.readInt() != 0);
+
+ Log.i(TAG, "Drm keys change");
+ mOnKeysChangeListener.onKeysChange(mMediaDrm, sessionId, keyStatusList,
+ hasNewUsableKey);
+ }
+ }
+ }
+ return;
+
+ case EXPIRATION_UPDATE:
+ if (mOnExpirationUpdateListener != null) {
+ if (msg.obj != null && msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel)msg.obj;
+ byte[] sessionId = parcel.createByteArray();
+ if (sessionId.length > 0) {
+ long expirationTime = parcel.readLong();
+
+ Log.i(TAG, "Drm key expiration update: " + expirationTime);
+ mOnExpirationUpdateListener.onExpirationUpdate(mMediaDrm, sessionId,
+ expirationTime);
+ }
+ }
+ }
+ return;
+
default:
Log.e(TAG, "Unknown message type " + msg.what);
return;
@@ -329,22 +562,37 @@ public final class MediaDrm {
}
}
- /*
+ /**
+ * Parse a list of KeyStatus objects from an event parcel
+ */
+ @NonNull
+ private List<KeyStatus> keyStatusListFromParcel(@NonNull Parcel parcel) {
+ int nelems = parcel.readInt();
+        List<KeyStatus> keyStatusList = new ArrayList<>(nelems);
+ while (nelems-- > 0) {
+ byte[] keyId = parcel.createByteArray();
+ int keyStatusCode = parcel.readInt();
+ keyStatusList.add(new KeyStatus(keyId, keyStatusCode));
+ }
+ return keyStatusList;
+ }
+
+ /**
* This method is called from native code when an event occurs. This method
* just uses the EventHandler system to post the event back to the main app thread.
* We use a weak reference to the original MediaPlayer object so that the native
* code is safe from the object disappearing from underneath it. (This is
* the cookie passed to native_setup().)
*/
- private static void postEventFromNative(Object mediadrm_ref,
- int eventType, int extra, Object obj)
+ private static void postEventFromNative(@NonNull Object mediadrm_ref,
+ int what, int eventType, int extra, @Nullable Object obj)
{
- MediaDrm md = (MediaDrm)((WeakReference)mediadrm_ref).get();
+ MediaDrm md = (MediaDrm)((WeakReference<MediaDrm>)mediadrm_ref).get();
if (md == null) {
return;
}
if (md.mEventHandler != null) {
- Message m = md.mEventHandler.obtainMessage(DRM_EVENT, eventType, extra, obj);
+ Message m = md.mEventHandler.obtainMessage(what, eventType, extra, obj);
md.mEventHandler.sendMessage(m);
}
}
@@ -355,6 +603,7 @@ public final class MediaDrm {
* @throws NotProvisionedException if provisioning is needed
* @throws ResourceBusyException if required resources are in use
*/
+ @NonNull
public native byte[] openSession() throws NotProvisionedException,
ResourceBusyException;
@@ -362,7 +611,7 @@ public final class MediaDrm {
* Close a session on the MediaDrm object that was previously opened
* with {@link #openSession}.
*/
- public native void closeSession(byte[] sessionId);
+ public native void closeSession(@NonNull byte[] sessionId);
/**
* This key request type species that the keys will be for online use, they will
@@ -382,26 +631,85 @@ public final class MediaDrm {
*/
public static final int KEY_TYPE_RELEASE = 3;
+ /** @hide */
+ @IntDef({
+ KEY_TYPE_STREAMING,
+ KEY_TYPE_OFFLINE,
+ KEY_TYPE_RELEASE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface KeyType {}
+
+ /**
+ * Key request type is initial license request
+ */
+ public static final int REQUEST_TYPE_INITIAL = 0;
+
+ /**
+ * Key request type is license renewal
+ */
+ public static final int REQUEST_TYPE_RENEWAL = 1;
+
+ /**
+ * Key request type is license release
+ */
+ public static final int REQUEST_TYPE_RELEASE = 2;
+
+ /** @hide */
+ @IntDef({
+ REQUEST_TYPE_INITIAL,
+ REQUEST_TYPE_RENEWAL,
+ REQUEST_TYPE_RELEASE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface RequestType {}
+
/**
* Contains the opaque data an app uses to request keys from a license server
*/
- public final static class KeyRequest {
+ public static final class KeyRequest {
private byte[] mData;
private String mDefaultUrl;
+ private int mRequestType;
KeyRequest() {}
/**
* Get the opaque message data
*/
- public byte[] getData() { return mData; }
+ @NonNull
+ public byte[] getData() {
+ if (mData == null) {
+ // this should never happen as mData is initialized in
+ // JNI after construction of the KeyRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("KeyRequest is not initialized");
+ }
+ return mData;
+ }
/**
* Get the default URL to use when sending the key request message to a
* server, if known. The app may prefer to use a different license
* server URL from other sources.
+ * This method returns an empty string if the default URL is not known.
*/
- public String getDefaultUrl() { return mDefaultUrl; }
+ @NonNull
+ public String getDefaultUrl() {
+ if (mDefaultUrl == null) {
+ // this should never happen as mDefaultUrl is initialized in
+ // JNI after construction of the KeyRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("KeyRequest is not initialized");
+ }
+ return mDefaultUrl;
+ }
+
+ /**
+ * Get the type of the request
+ */
+ @RequestType
+ public int getRequestType() { return mRequestType; }
};
/**
@@ -433,12 +741,15 @@ public final class MediaDrm {
* keys, which are identified by a keySetId.
* @param optionalParameters are included in the key request message to
* allow a client application to provide additional message parameters to the server.
- *
+ * This may be {@code null} if no additional parameters are to be sent.
* @throws NotProvisionedException if reprovisioning is needed, due to a
* problem with the certifcate
*/
- public native KeyRequest getKeyRequest(byte[] scope, byte[] init,
- String mimeType, int keyType, HashMap<String, String> optionalParameters)
+ @NonNull
+ public native KeyRequest getKeyRequest(
+ @NonNull byte[] scope, @Nullable byte[] init,
+ @Nullable String mimeType, @KeyType int keyType,
+ @Nullable HashMap<String, String> optionalParameters)
throws NotProvisionedException;
@@ -460,9 +771,10 @@ public final class MediaDrm {
* reprovisioning is required
* @throws DeniedByServerException if the response indicates that the
* server rejected the request
- * @throws ResourceBusyException if required resources are in use
*/
- public native byte[] provideKeyResponse(byte[] scope, byte[] response)
+ @Nullable
+ public native byte[] provideKeyResponse(
+ @NonNull byte[] scope, @NonNull byte[] response)
throws NotProvisionedException, DeniedByServerException;
@@ -473,14 +785,14 @@ public final class MediaDrm {
* @param sessionId the session ID for the DRM session
* @param keySetId identifies the saved key set to restore
*/
- public native void restoreKeys(byte[] sessionId, byte[] keySetId);
+ public native void restoreKeys(@NonNull byte[] sessionId, @NonNull byte[] keySetId);
/**
* Remove the current keys from a session.
*
* @param sessionId the session ID for the DRM session
*/
- public native void removeKeys(byte[] sessionId);
+ public native void removeKeys(@NonNull byte[] sessionId);
/**
* Request an informative description of the key status for the session. The status is
@@ -491,26 +803,46 @@ public final class MediaDrm {
*
* @param sessionId the session ID for the DRM session
*/
- public native HashMap<String, String> queryKeyStatus(byte[] sessionId);
+ @NonNull
+ public native HashMap<String, String> queryKeyStatus(@NonNull byte[] sessionId);
/**
* Contains the opaque data an app uses to request a certificate from a provisioning
* server
*/
- public final static class ProvisionRequest {
+ public static final class ProvisionRequest {
ProvisionRequest() {}
/**
* Get the opaque message data
*/
- public byte[] getData() { return mData; }
+ @NonNull
+ public byte[] getData() {
+ if (mData == null) {
+ // this should never happen as mData is initialized in
+            // JNI after construction of the ProvisionRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("ProvisionRequest is not initialized");
+ }
+ return mData;
+ }
/**
* Get the default URL to use when sending the provision request
* message to a server, if known. The app may prefer to use a different
* provisioning server URL obtained from other sources.
+ * This method returns an empty string if the default URL is not known.
*/
- public String getDefaultUrl() { return mDefaultUrl; }
+ @NonNull
+ public String getDefaultUrl() {
+ if (mDefaultUrl == null) {
+ // this should never happen as mDefaultUrl is initialized in
+ // JNI after construction of the ProvisionRequest object. The check
+ // is needed here to guarantee @NonNull annotation.
+ throw new RuntimeException("ProvisionRequest is not initialized");
+ }
+ return mDefaultUrl;
+ }
private byte[] mData;
private String mDefaultUrl;
@@ -525,12 +857,14 @@ public final class MediaDrm {
* is returned in ProvisionRequest.data. The recommended URL to deliver the provision
* request to is returned in ProvisionRequest.defaultUrl.
*/
+ @NonNull
public ProvisionRequest getProvisionRequest() {
return getProvisionRequestNative(CERTIFICATE_TYPE_NONE, "");
}
+ @NonNull
private native ProvisionRequest getProvisionRequestNative(int certType,
- String certAuthority);
+ @NonNull String certAuthority);
/**
* After a provision response is received by the app, it is provided to the DRM
@@ -542,12 +876,14 @@ public final class MediaDrm {
* @throws DeniedByServerException if the response indicates that the
* server rejected the request
*/
- public void provideProvisionResponse(byte[] response)
+ public void provideProvisionResponse(@NonNull byte[] response)
throws DeniedByServerException {
provideProvisionResponseNative(response);
}
- private native Certificate provideProvisionResponseNative(byte[] response)
+ @NonNull
+ /* could there be a valid response with 0-sized certificate or key? */
+ private native Certificate provideProvisionResponseNative(@NonNull byte[] response)
throws DeniedByServerException;
/**
@@ -577,6 +913,7 @@ public final class MediaDrm {
* record on the client is only removed after positive confirmation that the server
* received the message using releaseSecureStops().
*/
+ @NonNull
public native List<byte[]> getSecureStops();
/**
@@ -584,7 +921,8 @@ public final class MediaDrm {
*
* @param ssid - The secure stop ID provided by the license server.
*/
- public native byte[] getSecureStop(byte[] ssid);
+ @NonNull
+ public native byte[] getSecureStop(@NonNull byte[] ssid);
/**
* Process the SecureStop server response message ssRelease. After authenticating
@@ -592,7 +930,7 @@ public final class MediaDrm {
*
* @param ssRelease the server response indicating which secure stops to release
*/
- public native void releaseSecureStops(byte[] ssRelease);
+ public native void releaseSecureStops(@NonNull byte[] ssRelease);
/**
* Remove all secure stops without requiring interaction with the server.
@@ -621,6 +959,16 @@ public final class MediaDrm {
*/
public static final String PROPERTY_ALGORITHMS = "algorithms";
+ /** @hide */
+ @StringDef({
+ PROPERTY_VENDOR,
+ PROPERTY_VERSION,
+ PROPERTY_DESCRIPTION,
+ PROPERTY_ALGORITHMS,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface StringProperty {}
+
/**
* Read a DRM engine plugin String property value, given the property name string.
* <p>
@@ -628,51 +976,68 @@ public final class MediaDrm {
* {@link #PROPERTY_VENDOR}, {@link #PROPERTY_VERSION},
* {@link #PROPERTY_DESCRIPTION}, {@link #PROPERTY_ALGORITHMS}
*/
- public native String getPropertyString(String propertyName);
-
+ /* FIXME this throws IllegalStateException for invalid property names */
+ @NonNull
+ public native String getPropertyString(@NonNull @StringProperty String propertyName);
/**
* Byte array property name: the device unique identifier is established during
* device provisioning and provides a means of uniquely identifying each device.
*/
+ /* FIXME this throws IllegalStateException for invalid property names */
public static final String PROPERTY_DEVICE_UNIQUE_ID = "deviceUniqueId";
+ /** @hide */
+ @StringDef({
+ PROPERTY_DEVICE_UNIQUE_ID,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ArrayProperty {}
+
/**
* Read a DRM engine plugin byte array property value, given the property name string.
* <p>
* Standard fields names are {@link #PROPERTY_DEVICE_UNIQUE_ID}
*/
- public native byte[] getPropertyByteArray(String propertyName);
-
+ @NonNull
+ public native byte[] getPropertyByteArray(@ArrayProperty String propertyName);
/**
* Set a DRM engine plugin String property value.
*/
- public native void setPropertyString(String propertyName, String value);
+ public native void setPropertyString(
+ @StringProperty String propertyName, @NonNull String value);
/**
* Set a DRM engine plugin byte array property value.
*/
- public native void setPropertyByteArray(String propertyName, byte[] value);
-
+ public native void setPropertyByteArray(
+ @ArrayProperty String propertyName, @NonNull byte[] value);
- private static final native void setCipherAlgorithmNative(MediaDrm drm, byte[] sessionId,
- String algorithm);
+ private static final native void setCipherAlgorithmNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
- private static final native void setMacAlgorithmNative(MediaDrm drm, byte[] sessionId,
- String algorithm);
+ private static final native void setMacAlgorithmNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
- private static final native byte[] encryptNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] input, byte[] iv);
+ @NonNull
+ private static final native byte[] encryptNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] input, @NonNull byte[] iv);
- private static final native byte[] decryptNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] input, byte[] iv);
+ @NonNull
+ private static final native byte[] decryptNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] input, @NonNull byte[] iv);
- private static final native byte[] signNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] message);
+ @NonNull
+ private static final native byte[] signNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] message);
- private static final native boolean verifyNative(MediaDrm drm, byte[] sessionId,
- byte[] keyId, byte[] message, byte[] signature);
+ private static final native boolean verifyNative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull byte[] keyId, @NonNull byte[] message, @NonNull byte[] signature);
/**
* In addition to supporting decryption of DASH Common Encrypted Media, the
@@ -701,8 +1066,8 @@ public final class MediaDrm {
private MediaDrm mDrm;
private byte[] mSessionId;
- CryptoSession(MediaDrm drm, byte[] sessionId,
- String cipherAlgorithm, String macAlgorithm)
+ CryptoSession(@NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull String cipherAlgorithm, @NonNull String macAlgorithm)
{
mSessionId = sessionId;
mDrm = drm;
@@ -717,7 +1082,9 @@ public final class MediaDrm {
* @param input the data to encrypt
* @param iv the initialization vector to use for the cipher
*/
- public byte[] encrypt(byte[] keyid, byte[] input, byte[] iv) {
+ @NonNull
+ public byte[] encrypt(
+ @NonNull byte[] keyid, @NonNull byte[] input, @NonNull byte[] iv) {
return encryptNative(mDrm, mSessionId, keyid, input, iv);
}
@@ -728,7 +1095,9 @@ public final class MediaDrm {
* @param input the data to encrypt
* @param iv the initialization vector to use for the cipher
*/
- public byte[] decrypt(byte[] keyid, byte[] input, byte[] iv) {
+ @NonNull
+ public byte[] decrypt(
+ @NonNull byte[] keyid, @NonNull byte[] input, @NonNull byte[] iv) {
return decryptNative(mDrm, mSessionId, keyid, input, iv);
}
@@ -738,7 +1107,8 @@ public final class MediaDrm {
* @param keyid specifies which key to use
* @param message the data for which a signature is to be computed
*/
- public byte[] sign(byte[] keyid, byte[] message) {
+ @NonNull
+ public byte[] sign(@NonNull byte[] keyid, @NonNull byte[] message) {
return signNative(mDrm, mSessionId, keyid, message);
}
@@ -751,7 +1121,8 @@ public final class MediaDrm {
* @param signature the reference signature which will be compared with the
* computed signature
*/
- public boolean verify(byte[] keyid, byte[] message, byte[] signature) {
+ public boolean verify(
+ @NonNull byte[] keyid, @NonNull byte[] message, @NonNull byte[] signature) {
return verifyNative(mDrm, mSessionId, keyid, message, signature);
}
};
@@ -776,8 +1147,9 @@ public final class MediaDrm {
* using the method {@link #getPropertyString} with the property name
* "algorithms".
*/
- public CryptoSession getCryptoSession(byte[] sessionId,
- String cipherAlgorithm, String macAlgorithm)
+ public CryptoSession getCryptoSession(
+ @NonNull byte[] sessionId,
+ @NonNull String cipherAlgorithm, @NonNull String macAlgorithm)
{
return new CryptoSession(this, sessionId, cipherAlgorithm, macAlgorithm);
}
@@ -788,11 +1160,11 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public final static class CertificateRequest {
+ public static final class CertificateRequest {
private byte[] mData;
private String mDefaultUrl;
- CertificateRequest(byte[] data, String defaultUrl) {
+ CertificateRequest(@NonNull byte[] data, @NonNull String defaultUrl) {
mData = data;
mDefaultUrl = defaultUrl;
}
@@ -800,6 +1172,7 @@ public final class MediaDrm {
/**
* Get the opaque message data
*/
+ @NonNull
public byte[] getData() { return mData; }
/**
@@ -807,6 +1180,7 @@ public final class MediaDrm {
* message to a server, if known. The app may prefer to use a different
* certificate server URL obtained from other sources.
*/
+ @NonNull
public String getDefaultUrl() { return mDefaultUrl; }
}
@@ -822,8 +1196,9 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public CertificateRequest getCertificateRequest(int certType,
- String certAuthority)
+ @NonNull
+ public CertificateRequest getCertificateRequest(
+ @CertificateType int certType, @NonNull String certAuthority)
{
ProvisionRequest provisionRequest = getProvisionRequestNative(certType, certAuthority);
return new CertificateRequest(provisionRequest.getData(),
@@ -836,18 +1211,36 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public final static class Certificate {
+ public static final class Certificate {
Certificate() {}
/**
* Get the wrapped private key data
*/
- public byte[] getWrappedPrivateKey() { return mWrappedKey; }
+ @NonNull
+ public byte[] getWrappedPrivateKey() {
+ if (mWrappedKey == null) {
+                // this should never happen as mWrappedKey is initialized in
+                // JNI after construction of the Certificate object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("Certificate is not initialized");
+ }
+ return mWrappedKey;
+ }
/**
* Get the PEM-encoded certificate chain
*/
- public byte[] getContent() { return mCertificateData; }
+ @NonNull
+ public byte[] getContent() {
+ if (mCertificateData == null) {
+                // this should never happen as mCertificateData is initialized in
+                // JNI after construction of the Certificate object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("Certificate is not initialized");
+ }
+ return mCertificateData;
+ }
private byte[] mWrappedKey;
private byte[] mCertificateData;
@@ -871,13 +1264,16 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public Certificate provideCertificateResponse(byte[] response)
+ @NonNull
+ public Certificate provideCertificateResponse(@NonNull byte[] response)
throws DeniedByServerException {
return provideProvisionResponseNative(response);
}
- private static final native byte[] signRSANative(MediaDrm drm, byte[] sessionId,
- String algorithm, byte[] wrappedKey, byte[] message);
+ @NonNull
+ private static final native byte[] signRSANative(
+ @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+ @NonNull String algorithm, @NonNull byte[] wrappedKey, @NonNull byte[] message);
/**
* Sign data using an RSA key
@@ -890,8 +1286,10 @@ public final class MediaDrm {
*
* @hide - not part of the public API at this time
*/
- public byte[] signRSA(byte[] sessionId, String algorithm,
- byte[] wrappedKey, byte[] message) {
+ @NonNull
+ public byte[] signRSA(
+ @NonNull byte[] sessionId, @NonNull String algorithm,
+ @NonNull byte[] wrappedKey, @NonNull byte[] message) {
return signRSANative(this, sessionId, algorithm, wrappedKey, message);
}
diff --git a/media/java/android/media/MediaExtractor.java b/media/java/android/media/MediaExtractor.java
index b23b540..0bf995f 100644
--- a/media/java/android/media/MediaExtractor.java
+++ b/media/java/android/media/MediaExtractor.java
@@ -16,6 +16,9 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.content.ContentResolver;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
@@ -27,6 +30,8 @@ import android.os.IBinder;
import java.io.FileDescriptor;
import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
@@ -66,23 +71,29 @@ final public class MediaExtractor {
}
/**
- * Sets the DataSource object to be used as the data source for this extractor
- * {@hide}
+ * Sets the data source (MediaDataSource) to use.
+ *
+ * @param dataSource the MediaDataSource for the media you want to extract from
+ *
+ * @throws IllegalArgumentException if dataSource is invalid.
*/
- public native final void setDataSource(DataSource source) throws IOException;
+ public native final void setDataSource(@NonNull MediaDataSource dataSource)
+ throws IOException;
/**
* Sets the data source as a content Uri.
*
* @param context the Context to use when resolving the Uri
* @param uri the Content URI of the data you want to extract from.
- * @param headers the headers to be sent together with the request for the data
+ * @param headers the headers to be sent together with the request for the data.
+ * This can be {@code null} if no specific headers are to be sent with the
+ * request.
*/
public final void setDataSource(
- Context context, Uri uri, Map<String, String> headers)
+ @NonNull Context context, @NonNull Uri uri, @Nullable Map<String, String> headers)
throws IOException {
String scheme = uri.getScheme();
- if(scheme == null || scheme.equals("file")) {
+ if (scheme == null || scheme.equals("file")) {
setDataSource(uri.getPath());
return;
}
@@ -121,9 +132,11 @@ final public class MediaExtractor {
* Sets the data source (file-path or http URL) to use.
*
* @param path the path of the file, or the http URL
- * @param headers the headers associated with the http request for the stream you want to play
+ * @param headers the headers associated with the http request for the stream you want to play.
+ * This can be {@code null} if no specific headers are to be sent with the
+ * request.
*/
- public final void setDataSource(String path, Map<String, String> headers)
+ public final void setDataSource(@NonNull String path, @Nullable Map<String, String> headers)
throws IOException {
String[] keys = null;
String[] values = null;
@@ -148,10 +161,10 @@ final public class MediaExtractor {
}
private native final void nativeSetDataSource(
- IBinder httpServiceBinder,
- String path,
- String[] keys,
- String[] values) throws IOException;
+ @NonNull IBinder httpServiceBinder,
+ @NonNull String path,
+ @Nullable String[] keys,
+ @Nullable String[] values) throws IOException;
/**
* Sets the data source (file-path or http URL) to use.
@@ -165,7 +178,7 @@ final public class MediaExtractor {
* As an alternative, the application could first open the file for reading,
* and then use the file descriptor form {@link #setDataSource(FileDescriptor)}.
*/
- public final void setDataSource(String path) throws IOException {
+ public final void setDataSource(@NonNull String path) throws IOException {
nativeSetDataSource(
MediaHTTPService.createHttpServiceBinderIfNecessary(path),
path,
@@ -179,7 +192,7 @@ final public class MediaExtractor {
*
* @param fd the FileDescriptor for the file you want to extract from.
*/
- public final void setDataSource(FileDescriptor fd) throws IOException {
+ public final void setDataSource(@NonNull FileDescriptor fd) throws IOException {
setDataSource(fd, 0, 0x7ffffffffffffffL);
}
@@ -193,7 +206,7 @@ final public class MediaExtractor {
* @param length the length in bytes of the data to be extracted
*/
public native final void setDataSource(
- FileDescriptor fd, long offset, long length) throws IOException;
+ @NonNull FileDescriptor fd, long offset, long length) throws IOException;
@Override
protected void finalize() {
@@ -216,7 +229,9 @@ final public class MediaExtractor {
* Get the PSSH info if present.
* @return a map of uuid-to-bytes, with the uuid specifying
* the crypto scheme, and the bytes being the data specific to that scheme.
+ * This can be {@code null} if the source does not contain PSSH info.
*/
+ @Nullable
public Map<UUID, byte[]> getPsshInfo() {
Map<UUID, byte[]> psshMap = null;
Map<String, Object> formatMap = getFileFormatNative();
@@ -242,16 +257,19 @@ final public class MediaExtractor {
return psshMap;
}
+ @NonNull
private native Map<String, Object> getFileFormatNative();
/**
* Get the track format at the specified index.
* More detail on the representation can be found at {@link android.media.MediaCodec}
*/
+ @NonNull
public MediaFormat getTrackFormat(int index) {
return new MediaFormat(getTrackFormatNative(index));
}
+ @NonNull
private native Map<String, Object> getTrackFormatNative(int index);
/**
@@ -283,11 +301,20 @@ final public class MediaExtractor {
*/
public static final int SEEK_TO_CLOSEST_SYNC = 2;
+ /** @hide */
+ @IntDef({
+ SEEK_TO_PREVIOUS_SYNC,
+ SEEK_TO_NEXT_SYNC,
+ SEEK_TO_CLOSEST_SYNC,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SeekMode {}
+
/**
* All selected tracks seek near the requested time according to the
* specified mode.
*/
- public native void seekTo(long timeUs, int mode);
+ public native void seekTo(long timeUs, @SeekMode int mode);
/**
* Advance to the next sample. Returns false if no more sample data
@@ -304,7 +331,7 @@ final public class MediaExtractor {
* @param byteBuf the destination byte buffer
* @return the sample size (or -1 if no more samples are available).
*/
- public native int readSampleData(ByteBuffer byteBuf, int offset);
+ public native int readSampleData(@NonNull ByteBuffer byteBuf, int offset);
/**
* Returns the track index the current sample originates from (or -1
@@ -333,9 +360,20 @@ final public class MediaExtractor {
*/
public static final int SAMPLE_FLAG_ENCRYPTED = 2;
+ /** @hide */
+ @IntDef(
+ flag = true,
+ value = {
+ SAMPLE_FLAG_SYNC,
+ SAMPLE_FLAG_ENCRYPTED,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SampleFlag {}
+
/**
* Returns the current sample's flags.
*/
+ @SampleFlag
public native int getSampleFlags();
/**
@@ -346,7 +384,7 @@ final public class MediaExtractor {
* to be filled in.
* @return true iff the sample flags contain {@link #SAMPLE_FLAG_ENCRYPTED}
*/
- public native boolean getSampleCryptoInfo(MediaCodec.CryptoInfo info);
+ public native boolean getSampleCryptoInfo(@NonNull MediaCodec.CryptoInfo info);
/**
* Returns an estimate of how much data is presently cached in memory
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index 0c1c7e9..0e67daa 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -232,11 +232,22 @@ public final class MediaFormat {
public static final String KEY_TEMPORAL_LAYERING = "ts-schema";
/**
- * @hide
+ * A key describing the stride of the video bytebuffer layout.
+ * Stride (or row increment) is the difference between the index of a pixel
+ * and that of the pixel directly underneath. For YUV 420 formats, the
+ * stride corresponds to the Y plane; the stride of the U and V planes can
+ * be calculated based on the color format.
+ * The associated value is an integer, representing number of bytes.
*/
public static final String KEY_STRIDE = "stride";
+
/**
- * @hide
+ * A key describing the plane height of a multi-planar (YUV) video bytebuffer layout.
+ * Slice height (or plane height) is the number of rows that must be skipped to get
+ * from the top of the Y plane to the top of the U plane in the bytebuffer. In essence
+ * the offset of the U plane is sliceHeight * stride. The height of the U/V planes
+ * can be calculated based on the color format.
+ * The associated value is an integer, representing number of rows.
*/
public static final String KEY_SLICE_HEIGHT = "slice-height";
@@ -439,15 +450,55 @@ public final class MediaFormat {
public static final String KEY_PRIORITY = "priority";
/**
+ * A key describing the desired operating frame rate for video or sample rate for audio
+ * that the codec will need to operate at.
+ * <p>
+ * The associated value is an integer or a float representing frames-per-second or
+ * samples-per-second
+ * <p>
+ * This is used for cases like high-speed/slow-motion video capture, where the video encoder
+ * format contains the target playback rate (e.g. 30fps), but the component must be able to
+ * handle the high operating capture rate (e.g. 240fps).
+ * <p>
+ * This rate will be used by the codec for resource planning and setting the operating points.
+ *
+ */
+ public static final String KEY_OPERATING_RATE = "operating-rate";
+
+ /**
* A key describing the desired profile to be used by an encoder.
+ * The associated value is an integer.
* Constants are declared in {@link MediaCodecInfo.CodecProfileLevel}.
- * This key is only supported for codecs that specify a profile.
+ * This key is used as a hint, and is only supported for codecs
+ * that specify a profile.
*
* @see MediaCodecInfo.CodecCapabilities#profileLevels
*/
public static final String KEY_PROFILE = "profile";
/**
+ * A key describing the desired profile to be used by an encoder.
+ * The associated value is an integer.
+ * Constants are declared in {@link MediaCodecInfo.CodecProfileLevel}.
+ * This key is used as a further hint when specifying a desired profile,
+ * and is only supported for codecs that specify a level.
+ * <p>
+ * This key is ignored if the {@link #KEY_PROFILE profile} is not specified.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_LEVEL = "level";
+
+ /**
+ * A key describing the desired clockwise rotation on an output surface.
+ * This key is only used when the codec is configured using an output surface.
+ * The associated value is an integer, representing degrees.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_ROTATION = "rotation-degrees";
+
+ /**
* A key describing the desired bitrate mode to be used by an encoder.
* Constants are declared in {@link MediaCodecInfo.CodecCapabilities}.
*
diff --git a/media/java/android/media/MediaHTTPConnection.java b/media/java/android/media/MediaHTTPConnection.java
index b2886bb..d6bf421 100644
--- a/media/java/android/media/MediaHTTPConnection.java
+++ b/media/java/android/media/MediaHTTPConnection.java
@@ -32,6 +32,7 @@ import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.NoRouteToHostException;
import java.net.ProtocolException;
+import java.net.UnknownServiceException;
import java.util.HashMap;
import java.util.Map;
@@ -42,6 +43,9 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
private static final String TAG = "MediaHTTPConnection";
private static final boolean VERBOSE = false;
+ // connection timeout - 30 sec
+ private static final int CONNECT_TIMEOUT_MS = 30 * 1000;
+
private long mCurrentOffset = -1;
private URL mURL = null;
private Map<String, String> mHeaders = null;
@@ -181,6 +185,7 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
} else {
mConnection = (HttpURLConnection)url.openConnection();
}
+ mConnection.setConnectTimeout(CONNECT_TIMEOUT_MS);
// handle redirects ourselves if we do not allow cross-domain redirect
mConnection.setInstanceFollowRedirects(mAllowCrossDomainRedirect);
@@ -337,6 +342,9 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
} catch (NoRouteToHostException e) {
Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
return MEDIA_ERROR_UNSUPPORTED;
+ } catch (UnknownServiceException e) {
+ Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
+ return MEDIA_ERROR_UNSUPPORTED;
} catch (IOException e) {
if (VERBOSE) {
Log.d(TAG, "readAt " + offset + " / " + size + " => -1");
diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java
index 9a69c06..a3ff080 100644
--- a/media/java/android/media/MediaMetadataRetriever.java
+++ b/media/java/android/media/MediaMetadataRetriever.java
@@ -203,7 +203,20 @@ public class MediaMetadataRetriever
}
/**
- * Call this method after setDataSource(). This method retrieves the
+ * Sets the data source (MediaDataSource) to use.
+ *
+ * @param dataSource the MediaDataSource for the media you want to play
+ */
+ public void setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException {
+ _setDataSource(dataSource);
+ }
+
+ private native void _setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException;
+
+ /**
+ * Call this method after setDataSource(). This method retrieves the
* meta data value associated with the keyCode.
*
* The keyCode currently supported is listed below as METADATA_XXX
@@ -498,5 +511,11 @@ public class MediaMetadataRetriever
* The video rotation angle may be 0, 90, 180, or 270 degrees.
*/
public static final int METADATA_KEY_VIDEO_ROTATION = 24;
+ /**
+ * This key retrieves the original capture framerate, if it's
+ * available. The capture framerate will be a floating point
+ * number.
+ */
+ public static final int METADATA_KEY_CAPTURE_FRAMERATE = 25;
// Add more here...
}
diff --git a/media/java/android/media/MediaMuxer.java b/media/java/android/media/MediaMuxer.java
index f518ab2..4b6b4fa 100644
--- a/media/java/android/media/MediaMuxer.java
+++ b/media/java/android/media/MediaMuxer.java
@@ -16,12 +16,18 @@
package android.media;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import dalvik.system.CloseGuard;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.RandomAccessFile;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.Map;
@@ -80,19 +86,27 @@ final public class MediaMuxer {
public static final int MUXER_OUTPUT_WEBM = 1;
};
+ /** @hide */
+ @IntDef({
+ OutputFormat.MUXER_OUTPUT_MPEG_4,
+ OutputFormat.MUXER_OUTPUT_WEBM,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Format {}
+
// All the native functions are listed here.
- private static native long nativeSetup(FileDescriptor fd, int format);
+ private static native long nativeSetup(@NonNull FileDescriptor fd, int format);
private static native void nativeRelease(long nativeObject);
private static native void nativeStart(long nativeObject);
private static native void nativeStop(long nativeObject);
- private static native int nativeAddTrack(long nativeObject, String[] keys,
- Object[] values);
- private static native void nativeSetOrientationHint(long nativeObject,
- int degrees);
+ private static native int nativeAddTrack(
+ long nativeObject, @NonNull String[] keys, @NonNull Object[] values);
+ private static native void nativeSetOrientationHint(
+ long nativeObject, int degrees);
private static native void nativeSetLocation(long nativeObject, int latitude, int longitude);
- private static native void nativeWriteSampleData(long nativeObject,
- int trackIndex, ByteBuffer byteBuf,
- int offset, int size, long presentationTimeUs, int flags);
+ private static native void nativeWriteSampleData(
+ long nativeObject, int trackIndex, @NonNull ByteBuffer byteBuf,
+ int offset, int size, long presentationTimeUs, @MediaCodec.BufferFlag int flags);
// Muxer internal states.
private static final int MUXER_STATE_UNINITIALIZED = -1;
@@ -115,7 +129,7 @@ final public class MediaMuxer {
* @see android.media.MediaMuxer.OutputFormat
* @throws IOException if failed to open the file for write
*/
- public MediaMuxer(String path, int format) throws IOException {
+ public MediaMuxer(@NonNull String path, @Format int format) throws IOException {
if (path == null) {
throw new IllegalArgumentException("path must not be null");
}
@@ -246,11 +260,12 @@ final public class MediaMuxer {
/**
* Adds a track with the specified format.
- * @param format The media format for the track.
+ * @param format The media format for the track. This must not be an empty
+ * MediaFormat.
* @return The track index for this newly added track, and it should be used
* in the {@link #writeSampleData}.
*/
- public int addTrack(MediaFormat format) {
+ public int addTrack(@NonNull MediaFormat format) {
if (format == null) {
throw new IllegalArgumentException("format must not be null.");
}
@@ -302,8 +317,8 @@ final public class MediaMuxer {
* MediaMuxer uses the flags provided in {@link MediaCodec.BufferInfo},
* to signal sync frames.
*/
- public void writeSampleData(int trackIndex, ByteBuffer byteBuf,
- BufferInfo bufferInfo) {
+ public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf,
+ @NonNull BufferInfo bufferInfo) {
if (trackIndex < 0 || trackIndex > mLastTrackIndex) {
throw new IllegalArgumentException("trackIndex is invalid");
}
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index d77fcd8..a33fa59 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -17,6 +17,8 @@
package android.media;
import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.app.ActivityThread;
import android.app.AppOpsManager;
import android.content.ContentResolver;
@@ -44,10 +46,12 @@ import android.graphics.SurfaceTexture;
import android.media.AudioManager;
import android.media.MediaFormat;
import android.media.MediaTimeProvider;
+import android.media.PlaybackSettings;
import android.media.SubtitleController;
import android.media.SubtitleController.Anchor;
import android.media.SubtitleData;
import android.media.SubtitleTrack.RenderingWidget;
+import android.media.SyncSettings;
import com.android.internal.app.IAppOpsService;
@@ -181,7 +185,8 @@ import java.lang.ref.WeakReference;
* {@link #setDataSource(FileDescriptor)}, or
* {@link #setDataSource(String)}, or
* {@link #setDataSource(Context, Uri)}, or
- * {@link #setDataSource(FileDescriptor, long, long)} transfers a
+ * {@link #setDataSource(FileDescriptor, long, long)}, or
+ * {@link #setDataSource(MediaDataSource)} transfers a
* MediaPlayer object in the <em>Idle</em> state to the
* <em>Initialized</em> state.
* <ul>
@@ -470,16 +475,21 @@ import java.lang.ref.WeakReference;
* <td>{} </p></td>
* <td>This method can be called in any state and calling it does not change
* the object state. </p></td></tr>
- * <tr><td>setScreenOnWhilePlaying</></td>
+ * <tr><td>setPlaybackRate</p></td>
* <td>any </p></td>
* <td>{} </p></td>
* <td>This method can be called in any state and calling it does not change
- * the object state. </p></td></tr>
- * <tr><td>setPlaybackRate</p></td>
+ * the object state. </p></td></tr>
+ * <tr><td>setPlaybackSettings</p></td>
* <td>any </p></td>
* <td>{} </p></td>
* <td>This method can be called in any state and calling it does not change
* the object state. </p></td></tr>
+ * <tr><td>setScreenOnWhilePlaying</p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
* <tr><td>setVolume </p></td>
* <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
* PlaybackCompleted}</p></td>
@@ -1127,6 +1137,20 @@ public class MediaPlayer implements SubtitleController.Listener
throws IOException, IllegalArgumentException, IllegalStateException;
/**
+ * Sets the data source (MediaDataSource) to use.
+ *
+ * @param dataSource the MediaDataSource for the media you want to play
+ * @throws IllegalStateException if it is called in an invalid state
+ */
+ public void setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException, IllegalStateException {
+ _setDataSource(dataSource);
+ }
+
+ private native void _setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException, IllegalStateException;
+
+ /**
* Prepares the player for playback, synchronously.
*
* After setting the datasource and the display surface, you need to either
@@ -1327,6 +1351,8 @@ public class MediaPlayer implements SubtitleController.Listener
public native boolean isPlaying();
/**
+ * Change playback speed of audio by resampling the audio.
+ * <p>
* Specifies resampling as audio mode for variable rate playback, i.e.,
* resample the waveform based on the requested playback rate to get
* a new waveform, and play back the new waveform at the original sampling
@@ -1334,33 +1360,44 @@ public class MediaPlayer implements SubtitleController.Listener
* When rate is larger than 1.0, pitch becomes higher.
* When rate is smaller than 1.0, pitch becomes lower.
*/
- public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 0;
+ public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;
/**
+ * Change playback speed of audio without changing its pitch.
+ * <p>
* Specifies time stretching as audio mode for variable rate playback.
* Time stretching changes the duration of the audio samples without
* affecting its pitch.
- * FIXME: implement time strectching.
- * @hide
+ * <p>
+ * This mode is only supported for a limited range of playback speed factors,
+ * e.g. between 1/2x and 2x.
*/
public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;
+ /**
+ * Change playback speed of audio without changing its pitch, and
+ * possibly mute audio if time stretching is not supported for the playback
+ * speed.
+ * <p>
+ * Try to keep audio pitch when changing the playback rate, but allow the
+ * system to determine how to change audio playback if the rate is out
+ * of range.
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;
+
/** @hide */
@IntDef(
value = {
+ PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
+ PLAYBACK_RATE_AUDIO_MODE_STRETCH,
PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
- PLAYBACK_RATE_AUDIO_MODE_STRETCH })
+ })
@Retention(RetentionPolicy.SOURCE)
public @interface PlaybackRateAudioMode {}
/**
* Sets playback rate and audio mode.
*
- * <p> The supported audio modes are:
- * <ul>
- * <li> {@link #PLAYBACK_RATE_AUDIO_MODE_RESAMPLE}
- * </ul>
- *
* @param rate the ratio between desired playback rate and normal one.
* @param audioMode audio playback mode. Must be one of the supported
* audio modes.
@@ -1370,14 +1407,68 @@ public class MediaPlayer implements SubtitleController.Listener
* @throws IllegalArgumentException if audioMode is not supported.
*/
public void setPlaybackRate(float rate, @PlaybackRateAudioMode int audioMode) {
- if (!isAudioPlaybackModeSupported(audioMode)) {
+ PlaybackSettings settings = new PlaybackSettings();
+ settings.allowDefaults();
+ switch (audioMode) {
+ case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
+ settings.setSpeed(rate).setPitch(1.0f);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
+ settings.setSpeed(rate).setPitch(1.0f)
+ .setAudioFallbackMode(settings.AUDIO_FALLBACK_MODE_FAIL);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
+ settings.setSpeed(rate).setPitch(rate);
+ break;
+ default:
final String msg = "Audio playback mode " + audioMode + " is not supported";
throw new IllegalArgumentException(msg);
}
- _setPlaybackRate(rate);
+ setPlaybackSettings(settings);
}
- private native void _setPlaybackRate(float rate) throws IllegalStateException;
+ /**
+ * Sets playback rate using {@link PlaybackSettings}.
+ *
+ * @param settings the playback settings.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if settings is not supported.
+ */
+ public native void setPlaybackSettings(@NonNull PlaybackSettings settings);
+
+ /**
+ * Gets the playback settings, containing the current playback rate.
+ *
+ * @return the playback settings.
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native PlaybackSettings getPlaybackSettings();
+
+ /**
+ * Sets A/V sync mode.
+ *
+ * @param settings the A/V sync settings to apply
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if settings are not supported.
+ */
+ public native void setSyncSettings(@NonNull SyncSettings settings);
+
+ /**
+ * Gets the A/V sync mode.
+ *
+ * @return the A/V sync settings
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native SyncSettings getSyncSettings();
/**
* Seeks to specified time position.
@@ -1389,6 +1480,39 @@ public class MediaPlayer implements SubtitleController.Listener
public native void seekTo(int msec) throws IllegalStateException;
/**
+ * Get current playback position.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion. It contains the media time and system timestamp of an anchor frame
+ * ({@link MediaTimestamp#mediaTimeUs} and {@link MediaTimestamp#nanoTime})
+ * and the speed of the media clock ({@link MediaTimestamp#clockRate}).
+ * <p>
+ * During regular playback, the media time moves fairly constantly (though the
+ * anchor frame may be rebased to a current system time, the linear correlation stays
+ * steady). Therefore, this method does not need to be called often.
+ * <p>
+ * To help users to get current playback position, this method always returns the timestamp of
+ * just-rendered frame, i.e., {@link System#nanoTime} and its corresponding media time. They
+ * can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media player has not been initialized.
+ */
+ @Nullable
+ public MediaTimestamp getTimestamp()
+ {
+ try {
+ // TODO: get the timestamp from native side
+ return new MediaTimestamp(
+ getCurrentPosition() * 1000L,
+ System.nanoTime(),
+ isPlaying() ? getPlaybackSettings().getSpeed() : 0.f);
+ } catch (IllegalStateException e) {
+ return null;
+ }
+ }
+
+ /**
* Gets the current playback position.
*
* @return the current position in milliseconds
@@ -1822,6 +1946,7 @@ public class MediaPlayer implements SubtitleController.Listener
public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
+ public static final int MEDIA_TRACK_TYPE_METADATA = 5;
final int mTrackType;
final MediaFormat mFormat;
@@ -2562,6 +2687,7 @@ public class MediaPlayer implements SubtitleController.Listener
private static final int MEDIA_ERROR = 100;
private static final int MEDIA_INFO = 200;
private static final int MEDIA_SUBTITLE_DATA = 201;
+ private static final int MEDIA_META_DATA = 202;
private TimeProvider mTimeProvider;
@@ -2602,15 +2728,21 @@ public class MediaPlayer implements SubtitleController.Listener
return;
case MEDIA_STOPPED:
- if (mTimeProvider != null) {
- mTimeProvider.onStopped();
+ {
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onStopped();
+ }
}
break;
case MEDIA_STARTED:
case MEDIA_PAUSED:
- if (mTimeProvider != null) {
- mTimeProvider.onPaused(msg.what == MEDIA_PAUSED);
+ {
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onPaused(msg.what == MEDIA_PAUSED);
+ }
}
break;
@@ -2620,21 +2752,26 @@ public class MediaPlayer implements SubtitleController.Listener
return;
case MEDIA_SEEK_COMPLETE:
- if (mOnSeekCompleteListener != null) {
- mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
- }
- // fall through
+ if (mOnSeekCompleteListener != null) {
+ mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
+ }
+ // fall through
case MEDIA_SKIPPED:
- if (mTimeProvider != null) {
- mTimeProvider.onSeekComplete(mMediaPlayer);
- }
- return;
+ {
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onSeekComplete(mMediaPlayer);
+ }
+ }
+ return;
case MEDIA_SET_VIDEO_SIZE:
- if (mOnVideoSizeChangedListener != null)
- mOnVideoSizeChangedListener.onVideoSizeChanged(mMediaPlayer, msg.arg1, msg.arg2);
- return;
+ if (mOnVideoSizeChangedListener != null) {
+ mOnVideoSizeChangedListener.onVideoSizeChanged(
+ mMediaPlayer, msg.arg1, msg.arg2);
+ }
+ return;
case MEDIA_ERROR:
Log.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")");
@@ -2698,6 +2835,18 @@ public class MediaPlayer implements SubtitleController.Listener
}
return;
+ case MEDIA_META_DATA:
+ if (mOnTimedMetaDataListener == null) {
+ return;
+ }
+ if (msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel) msg.obj;
+ TimedMetaData data = TimedMetaData.createTimedMetaDataFromParcel(parcel);
+ parcel.recycle();
+ mOnTimedMetaDataListener.onTimedMetaData(mMediaPlayer, data);
+ }
+ return;
+
case MEDIA_NOP: // interface test message - ignore
break;
@@ -2934,6 +3083,46 @@ public class MediaPlayer implements SubtitleController.Listener
private OnSubtitleDataListener mOnSubtitleDataListener;
+ /**
+ * Interface definition of a callback to be invoked when a
+ * track has timed metadata available.
+ *
+ * @see MediaPlayer#setOnTimedMetaDataListener(OnTimedMetaDataListener)
+ */
+ public interface OnTimedMetaDataListener
+ {
+ /**
+ * Called to indicate available timed metadata
+ * <p>
+ * This method will be called as timed metadata is extracted from the media,
+ * in the same order as it occurs in the media. The timing of this event is
+ * not controlled by the associated timestamp.
+ *
+ * @param mp the MediaPlayer associated with this callback
+ * @param data the timed metadata sample associated with this event
+ */
+ public void onTimedMetaData(MediaPlayer mp, TimedMetaData data);
+ }
+
+ /**
+ * Register a callback to be invoked when a selected track has timed metadata available.
+ * <p>
+ * Currently only HTTP live streaming data URI's embedded with timed ID3 tags generates
+ * {@link TimedMetaData}.
+ *
+ * @see MediaPlayer#selectTrack(int)
+ * @see MediaPlayer.OnTimedMetaDataListener
+ * @see TimedMetaData
+ *
+ * @param listener the callback that will be run
+ */
+ public void setOnTimedMetaDataListener(OnTimedMetaDataListener listener)
+ {
+ mOnTimedMetaDataListener = listener;
+ }
+
+ private OnTimedMetaDataListener mOnTimedMetaDataListener;
+
/* Do not change these values without updating their counterparts
* in include/media/mediaplayer.h!
*/
@@ -3139,14 +3328,6 @@ public class MediaPlayer implements SubtitleController.Listener
mode == VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING);
}
- /*
- * Test whether a given audio playback mode is supported.
- * TODO query supported AudioPlaybackMode from player.
- */
- private boolean isAudioPlaybackModeSupported(int mode) {
- return (mode == PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
- }
-
/** @hide */
static class TimeProvider implements MediaPlayer.OnSeekCompleteListener,
MediaTimeProvider {
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index 58c86f2..78fd9f0 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -140,6 +140,24 @@ public class MediaRecorder
public native Surface getSurface();
/**
+ * Configures the recorder to use a persistent surface when using SURFACE video source.
+ * <p> May only be called after {@link #prepare} in lieu of {@link #getSurface}.
+ * Frames rendered to the Surface before {@link #start} will be discarded.</p>
+
+ * @param surface a persistent input surface created by
+ * {@link MediaCodec#createPersistentInputSurface}
+ * @throws IllegalStateException if it is called before {@link #prepare}, after
+ * {@link #stop}, or is called when VideoSource is not set to SURFACE.
+ * @throws IllegalArgumentException if the surface was not created by
+ * {@link MediaCodec#createPersistentInputSurface}.
+ * @see MediaCodec#createPersistentInputSurface
+ * @see MediaRecorder.VideoSource
+ */
+ public void usePersistentSurface(Surface surface) {
+ throw new IllegalArgumentException("not implemented");
+ }
+
+ /**
* Sets a Surface to show a preview of recorded media (video). Calls this
* before prepare() to make sure that the desirable preview display is
* set. If {@link #setCamera(Camera)} is used and the surface has been
@@ -157,8 +175,11 @@ public class MediaRecorder
}
/**
- * Defines the audio source. These constants are used with
- * {@link MediaRecorder#setAudioSource(int)}.
+ * Defines the audio source.
+ * An audio source defines both a default physical source of audio signal, and a recording
+ * configuration; it's also known as a capture preset. These constants are for instance used
+ * in {@link MediaRecorder#setAudioSource(int)} or
+ * {@link AudioRecord.Builder#setCapturePreset(int)}.
*/
public final class AudioSource {
@@ -168,7 +189,7 @@ public class MediaRecorder
public final static int AUDIO_SOURCE_INVALID = -1;
/* Do not change these values without updating their counterparts
- * in system/core/include/system/audio.h!
+ * in system/media/audio/include/system/audio.h!
*/
/** Default audio source **/
diff --git a/media/java/android/media/MediaRouter.java b/media/java/android/media/MediaRouter.java
index b4c612a..a046512 100644
--- a/media/java/android/media/MediaRouter.java
+++ b/media/java/android/media/MediaRouter.java
@@ -18,6 +18,7 @@ package android.media;
import android.Manifest;
import android.annotation.DrawableRes;
+import android.annotation.NonNull;
import android.app.ActivityThread;
import android.content.BroadcastReceiver;
import android.content.Context;
@@ -882,8 +883,12 @@ public class MediaRouter {
* @param types type flags indicating which types this route should be used for.
* The route must support at least a subset.
* @param route Route to select
+ * @throws IllegalArgumentException if the given route is {@code null}
*/
- public void selectRoute(int types, RouteInfo route) {
+ public void selectRoute(int types, @NonNull RouteInfo route) {
+ if (route == null) {
+ throw new IllegalArgumentException("Route cannot be null.");
+ }
selectRouteStatic(types, route, true);
}
@@ -894,7 +899,8 @@ public class MediaRouter {
selectRouteStatic(types, route, explicit);
}
- static void selectRouteStatic(int types, RouteInfo route, boolean explicit) {
+ static void selectRouteStatic(int types, @NonNull RouteInfo route, boolean explicit) {
+ assert(route != null);
final RouteInfo oldRoute = sStatic.mSelectedRoute;
if (oldRoute == route) return;
if (!route.matchesTypes(types)) {
@@ -917,7 +923,7 @@ public class MediaRouter {
final WifiDisplay activeDisplay =
sStatic.mDisplayService.getWifiDisplayStatus().getActiveDisplay();
final boolean oldRouteHasAddress = oldRoute != null && oldRoute.mDeviceAddress != null;
- final boolean newRouteHasAddress = route != null && route.mDeviceAddress != null;
+ final boolean newRouteHasAddress = route.mDeviceAddress != null;
if (activeDisplay != null || oldRouteHasAddress || newRouteHasAddress) {
if (newRouteHasAddress && !matchesDeviceAddress(activeDisplay, route)) {
if (sStatic.mCanConfigureWifiDisplays) {
@@ -1500,18 +1506,18 @@ public class MediaRouter {
/**
* The default playback type, "local", indicating the presentation of the media is happening
- * on the same device (e.g. a phone, a tablet) as where it is controlled from.
+ * on the same device (e&#46;g&#46; a phone, a tablet) as where it is controlled from.
* @see #getPlaybackType()
*/
public final static int PLAYBACK_TYPE_LOCAL = 0;
/**
* A playback type indicating the presentation of the media is happening on
- * a different device (i.e. the remote device) than where it is controlled from.
+ * a different device (i&#46;e&#46; the remote device) than where it is controlled from.
* @see #getPlaybackType()
*/
public final static int PLAYBACK_TYPE_REMOTE = 1;
/**
- * Playback information indicating the playback volume is fixed, i.e. it cannot be
+ * Playback information indicating the playback volume is fixed, i&#46;e&#46; it cannot be
* controlled from this object. An example of fixed playback volume is a remote player,
* playing over HDMI where the user prefers to control the volume on the HDMI sink, rather
* than attenuate at the source.
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index 1255276..9ea6722 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -553,15 +553,8 @@ public class MediaScanner
boolean isimage = MediaFile.isImageFileType(mFileType);
if (isaudio || isvideo || isimage) {
- if (mExternalIsEmulated && path.startsWith(mExternalStoragePath)) {
- // try to rewrite the path to bypass the sd card fuse layer
- String directPath = Environment.getMediaStorageDirectory() +
- path.substring(mExternalStoragePath.length());
- File f = new File(directPath);
- if (f.exists()) {
- path = directPath;
- }
- }
+ path = Environment.maybeTranslateEmulatedPathToInternal(new File(path))
+ .getAbsolutePath();
}
// we only extract metadata for audio and video files
diff --git a/media/java/android/media/MediaSync.java b/media/java/android/media/MediaSync.java
new file mode 100644
index 0000000..dc6760d
--- /dev/null
+++ b/media/java/android/media/MediaSync.java
@@ -0,0 +1,733 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.media.AudioTrack;
+import android.media.PlaybackSettings;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * MediaSync class can be used to synchronously playback audio and video streams.
+ * It can be used to play audio-only or video-only stream, too.
+ *
+ * <p>MediaSync is generally used like this:
+ * <pre>
+ * MediaSync sync = new MediaSync();
+ * sync.setSurface(surface);
+ * Surface inputSurface = sync.createInputSurface();
+ * ...
+ * // MediaCodec videoDecoder = ...;
+ * videoDecoder.configure(format, inputSurface, ...);
+ * ...
+ * sync.setAudioTrack(audioTrack);
+ * sync.setCallback(new MediaSync.Callback() {
+ * {@literal @Override}
+ * public void onAudioBufferConsumed(MediaSync sync, ByteBuffer audioBuffer, int bufferIndex) {
+ * ...
+ * }
+ * }, null);
+ * // This needs to be done since sync is paused on creation.
+ * sync.setPlaybackRate(1.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ *
+ * for (;;) {
+ * ...
+ * // send video frames to surface for rendering, e.g., call
+ * // videoDecoder.releaseOutputBuffer(videoOutputBufferIx, videoPresentationTimeNs);
+ * // More details are available as below.
+ * ...
+ * sync.queueAudio(audioByteBuffer, bufferIndex, size, audioPresentationTimeUs); // non-blocking.
+ * // The audioByteBuffer and bufferIndex will be returned via callback.
+ * // More details are available as below.
+ * ...
+ * ...
+ * }
+ * sync.setPlaybackRate(0.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
+ * sync.release();
+ * sync = null;
+ *
+ * // The following code snippet illustrates how video/audio raw frames are created by
+ * // MediaCodec's, how they are fed to MediaSync and how they are returned by MediaSync.
+ * // This is the callback from MediaCodec.
+ * onOutputBufferAvailable(MediaCodec codec, int bufferIndex, BufferInfo info) {
+ * // ...
+ * if (codec == videoDecoder) {
+ * // surface timestamp must contain media presentation time in nanoseconds.
+ * codec.releaseOutputBuffer(bufferIndex, 1000 * info.presentationTimeUs);
+ * } else {
+ * ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferIndex);
+ * sync.queueAudio(audioByteBuffer, bufferIndex, info.size, info.presentationTimeUs);
+ * }
+ * // ...
+ * }
+ *
+ * // This is the callback from MediaSync.
+ * onAudioBufferConsumed(MediaSync sync, ByteBuffer buffer, int bufferIndex) {
+ * // ...
+ * audioDecoder.releaseBuffer(bufferIndex, false);
+ * // ...
+ * }
+ *
+ * </pre>
+ *
+ * The client needs to configure corresponding sink by setting the Surface and/or AudioTrack
+ * based on the stream type it will play.
+ * <p>
+ * For video, the client needs to call {@link #createInputSurface} to obtain a surface on
+ * which it will render video frames.
+ * <p>
+ * For audio, the client needs to set up audio track correctly, e.g., using {@link
+ * AudioTrack#MODE_STREAM}. The audio buffers are sent to MediaSync directly via {@link
+ * #queueAudio}, and are returned to the client via {@link Callback#onAudioBufferConsumed}
+ * asynchronously. The client should not modify an audio buffer till it's returned.
+ * <p>
+ * The client can optionally pre-fill audio/video buffers by setting playback rate to 0.0,
+ * and then feed audio/video buffers to corresponding components. This can reduce possible
+ * initial underrun.
+ * <p>
+ */
+final public class MediaSync {
+ /**
+ * MediaSync callback interface. Used to notify the user asynchronously
+ * of various MediaSync events.
+ */
+ public static abstract class Callback {
+ /**
+ * Called when returning an audio buffer which has been consumed.
+ *
+ * @param sync The MediaSync object.
+ * @param audioBuffer The returned audio buffer.
+ * @param bufferIndex The index associated with the audio buffer
+ */
+ public abstract void onAudioBufferConsumed(
+ @NonNull MediaSync sync, @NonNull ByteBuffer audioBuffer, int bufferIndex);
+ }
+
+ /** Audio track failed.
+ * @see android.media.MediaSync.OnErrorListener
+ */
+ public static final int MEDIASYNC_ERROR_AUDIOTRACK_FAIL = 1;
+
+ /** The surface failed to handle video buffers.
+ * @see android.media.MediaSync.OnErrorListener
+ */
+ public static final int MEDIASYNC_ERROR_SURFACE_FAIL = 2;
+
+ /**
+ * Interface definition of a callback to be invoked when there
+ * has been an error during an asynchronous operation (other errors
+ * will throw exceptions at method call time).
+ */
+ public interface OnErrorListener {
+ /**
+ * Called to indicate an error.
+ *
+ * @param sync The MediaSync the error pertains to
+ * @param what The type of error that has occurred:
+ * <ul>
+ * <li>{@link #MEDIASYNC_ERROR_AUDIOTRACK_FAIL}
+ * <li>{@link #MEDIASYNC_ERROR_SURFACE_FAIL}
+ * </ul>
+ * @param extra an extra code, specific to the error. Typically
+ * implementation dependent.
+ */
+ void onError(@NonNull MediaSync sync, int what, int extra);
+ }
+
+ private static final String TAG = "MediaSync";
+
+ private static final int EVENT_CALLBACK = 1;
+ private static final int EVENT_SET_CALLBACK = 2;
+
+ private static final int CB_RETURN_AUDIO_BUFFER = 1;
+
+ private static class AudioBuffer {
+ public ByteBuffer mByteBuffer;
+ public int mBufferIndex;
+ public int mSizeInBytes;
+ long mPresentationTimeUs;
+
+ public AudioBuffer(@NonNull ByteBuffer byteBuffer, int bufferIndex,
+ int sizeInBytes, long presentationTimeUs) {
+ mByteBuffer = byteBuffer;
+ mBufferIndex = bufferIndex;
+ mSizeInBytes = sizeInBytes;
+ mPresentationTimeUs = presentationTimeUs;
+ }
+ }
+
+ private final Object mCallbackLock = new Object();
+ private Handler mCallbackHandler = null;
+ private MediaSync.Callback mCallback = null;
+
+ private final Object mOnErrorListenerLock = new Object();
+ private Handler mOnErrorListenerHandler = null;
+ private MediaSync.OnErrorListener mOnErrorListener = null;
+
+ private Thread mAudioThread = null;
+ // Created on mAudioThread when mAudioThread is started. When used on user thread, they should
+ // be guarded by checking mAudioThread.
+ private Handler mAudioHandler = null;
+ private Looper mAudioLooper = null;
+
+ private final Object mAudioLock = new Object();
+ private AudioTrack mAudioTrack = null;
+ private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>();
+ private float mPlaybackRate = 0.0f;
+
+ private long mNativeContext;
+
+ /**
+ * Class constructor. On creation, MediaSync is paused, i.e., playback rate is 0.0f.
+ */
+ public MediaSync() {
+ native_setup();
+ }
+
+ private native final void native_setup();
+
+ @Override
+ protected void finalize() {
+ native_finalize();
+ }
+
+ private native final void native_finalize();
+
+ /**
+ * Make sure you call this when you're done to free up any opened
+ * component instance instead of relying on the garbage collector
+ * to do this for you at some point in the future.
+ */
+ public final void release() {
+ returnAudioBuffers();
+ if (mAudioThread != null) {
+ if (mAudioLooper != null) {
+ mAudioLooper.quit();
+ }
+ }
+ setCallback(null, null);
+ native_release();
+ }
+
+ private native final void native_release();
+
+ /**
+ * Sets an asynchronous callback for actionable MediaSync events.
+ * <p>
+ * This method can be called multiple times to update a previously set callback. If the
+ * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
+ * <p>
+ * <b>Do not call this inside callback.</b>
+ *
+ * @param cb The callback that will run. Use {@code null} to stop receiving callbacks.
+ * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
+ * internal handler if it exists.
+ */
+ public void setCallback(@Nullable /* MediaSync. */ Callback cb, @Nullable Handler handler) {
+ synchronized(mCallbackLock) {
+ if (handler != null) {
+ mCallbackHandler = handler;
+ } else {
+ Looper looper;
+ if ((looper = Looper.myLooper()) == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper == null) {
+ mCallbackHandler = null;
+ } else {
+ mCallbackHandler = new Handler(looper);
+ }
+ }
+
+ mCallback = cb;
+ }
+ }
+
+ /**
+ * Sets an asynchronous callback for error events.
+ * <p>
+ * This method can be called multiple times to update a previously set listener. If the
+ * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
+ * <p>
+ * <b>Do not call this inside callback.</b>
+ *
+ * @param listener The callback that will run. Use {@code null} to stop receiving callbacks.
+ * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
+ * internal handler if it exists.
+ */
+ public void setOnErrorListener(@Nullable /* MediaSync. */ OnErrorListener listener,
+ @Nullable Handler handler) {
+ synchronized(mOnErrorListenerLock) {
+ if (handler != null) {
+ mOnErrorListenerHandler = handler;
+ } else {
+ Looper looper;
+ if ((looper = Looper.myLooper()) == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper == null) {
+ mOnErrorListenerHandler = null;
+ } else {
+ mOnErrorListenerHandler = new Handler(looper);
+ }
+ }
+
+ mOnErrorListener = listener;
+ }
+ }
+
+ /**
+ * Sets the output surface for MediaSync.
+ * <p>
+ * Currently, this is only supported in the Initialized state.
+ *
+ * @param surface Specify a surface on which to render the video data.
+ * @throws IllegalArgumentException if the surface has been released, is invalid,
+ * or can not be connected.
+ * @throws IllegalStateException if setting the surface is not supported, e.g.
+ * not in the Initialized state, or another surface has already been configured.
+ */
+ public void setSurface(@Nullable Surface surface) {
+ native_configureSurface(surface);
+ }
+
+ private native final void native_configureSurface(@Nullable Surface surface);
+
+ /**
+ * Sets the audio track for MediaSync.
+ * <p>
+ * Currently, this is only supported in the Initialized state.
+ *
+ * @param audioTrack Specify an AudioTrack through which to render the audio data.
+ * @throws IllegalArgumentException if the audioTrack has been released, or is invalid.
+ * @throws IllegalStateException if setting the audio track is not supported, e.g.
+ * not in the Initialized state, or another audio track has already been configured.
+ */
+ public void setAudioTrack(@Nullable AudioTrack audioTrack) {
+ // AudioTrack has sanity check for configured sample rate.
+ int nativeSampleRateInHz = (audioTrack == null ? 0 : audioTrack.getSampleRate());
+
+ native_configureAudioTrack(audioTrack, nativeSampleRateInHz);
+ mAudioTrack = audioTrack;
+ if (audioTrack != null && mAudioThread == null) {
+ createAudioThread();
+ }
+ }
+
+ private native final void native_configureAudioTrack(
+ @Nullable AudioTrack audioTrack, int nativeSampleRateInHz);
+
+ /**
+ * Requests a Surface to use as the input. This may only be called after
+ * {@link #setSurface}.
+ * <p>
+ * The application is responsible for calling release() on the Surface when
+ * done.
+ * @throws IllegalStateException if not configured, or another input surface has
+ * already been created.
+ */
+ @NonNull
+ public native final Surface createInputSurface();
+
+ /**
+ * Resample audio data when changing playback speed.
+ * <p>
+ * Resample the waveform based on the requested playback rate to get
+ * a new waveform, and play back the new waveform at the original sampling
+ * frequency.
+ * <p><ul>
+ * <li>When rate is larger than 1.0, pitch becomes higher.
+ * <li>When rate is smaller than 1.0, pitch becomes lower.
+ * </ul>
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;
+
+ /**
+ * Time stretch audio when changing playback speed.
+ * <p>
+ * Time stretching changes the duration of the audio samples without
+ * affecting their pitch. This is only supported for a limited range
+ * of playback speeds, e.g. from 1/2x to 2x. If the rate is adjusted
+ * beyond this limit, the rate change will fail.
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;
+
+ /**
+ * Time stretch audio when changing playback speed, and may mute if
+ * stretching is no longer supported.
+ * <p>
+ * Time stretching changes the duration of the audio samples without
+ * affecting their pitch. This is only supported for a limited range
+ * of playback speeds, e.g. from 1/2x to 2x. When it is no longer
+ * supported, the audio may be muted. Using this mode will not fail
+ * for non-negative playback rates.
+ */
+ public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
+ PLAYBACK_RATE_AUDIO_MODE_STRETCH,
+ PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PlaybackRateAudioMode {}
+
+ /**
+ * Sets playback rate and audio mode.
+ *
+ * @param rate the ratio between desired playback rate and normal one. 1.0 means normal
+ * playback speed. 0.0 means pause. Value larger than 1.0 means faster playback,
+ * while value between 0.0 and 1.0 for slower playback. <b>Note:</b> the normal rate
+ * does not change as a result of this call. To restore the original rate at any time,
+ * use 1.0.
+ * @param audioMode audio playback mode. Must be one of the supported
+ * audio modes.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ * @throws IllegalArgumentException if audioMode is not supported.
+ */
+ public void setPlaybackRate(float rate, @PlaybackRateAudioMode int audioMode) {
+ PlaybackSettings rateSettings = new PlaybackSettings();
+ rateSettings.allowDefaults();
+ switch (audioMode) {
+ case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
+ rateSettings.setSpeed(rate).setPitch(1.0f);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
+ rateSettings.setSpeed(rate).setPitch(1.0f)
+ .setAudioFallbackMode(rateSettings.AUDIO_FALLBACK_MODE_FAIL);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
+ rateSettings.setSpeed(rate).setPitch(rate);
+ break;
+ default:
+ {
+ final String msg = "Audio playback mode " + audioMode + " is not supported";
+ throw new IllegalArgumentException(msg);
+ }
+ }
+ setPlaybackSettings(rateSettings);
+ }
+
+ /**
+ * Sets playback rate using {@link PlaybackSettings}.
+ * <p>
+ * When using MediaSync with {@link AudioTrack}, set playback settings using this
+ * call instead of calling it directly on the track, so that the sync is aware of
+ * the settings change.
+ * <p>
+ * This call also works if there is no audio track.
+ *
+ * @param settings the playback settings to use. {@link PlaybackSettings#getSpeed
+ * Speed} is the ratio between desired playback rate and normal one. 1.0 means
+ * normal playback speed. 0.0 means pause. Value larger than 1.0 means faster playback,
+ * while value between 0.0 and 1.0 for slower playback. <b>Note:</b> the normal rate
+ * does not change as a result of this call. To restore the original rate at any time,
+ * use speed of 1.0.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ * @throws IllegalArgumentException if the settings are not supported.
+ */
+ public void setPlaybackSettings(@NonNull PlaybackSettings settings) {
+ float rate;
+ try {
+ rate = settings.getSpeed();
+
+ // rate is specified
+ if (mAudioTrack != null) {
+ try {
+ if (rate == 0.0) {
+ mAudioTrack.pause();
+ } else {
+ mAudioTrack.setPlaybackSettings(settings);
+ mAudioTrack.play();
+ }
+ } catch (IllegalStateException e) {
+ throw e;
+ }
+ }
+
+ synchronized(mAudioLock) {
+ mPlaybackRate = rate;
+ }
+ if (mPlaybackRate != 0.0 && mAudioThread != null) {
+ postRenderAudio(0);
+ }
+ native_setPlaybackRate(mPlaybackRate);
+ } catch (IllegalStateException e) {
+ // rate is not specified; still, propagate settings to audio track
+ if (mAudioTrack != null) {
+ mAudioTrack.setPlaybackSettings(settings);
+ }
+ }
+ }
+
+ /**
+ * Gets the playback rate using {@link PlaybackSettings}.
+ *
+ * @return the playback rate being used.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ */
+ @NonNull
+ public PlaybackSettings getPlaybackSettings() {
+ if (mAudioTrack != null) {
+ return mAudioTrack.getPlaybackSettings();
+ } else {
+ PlaybackSettings settings = new PlaybackSettings();
+ settings.allowDefaults();
+ settings.setSpeed(mPlaybackRate);
+ return settings;
+ }
+ }
+
+ private native final void native_setPlaybackRate(float rate);
+
+ /**
+ * Sets A/V sync mode.
+ *
+ * @param settings the A/V sync settings to apply
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if settings are not supported.
+ */
+ public native void setSyncSettings(@NonNull SyncSettings settings);
+
+ /**
+ * Gets the A/V sync mode.
+ *
+ * @return the A/V sync settings
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native SyncSettings getSyncSettings();
+
+ /**
+ * Flushes all buffers from the sync object.
+ * <p>
+ * No callbacks are received for the flushed buffers.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ public void flush() {
+ synchronized(mAudioLock) {
+ mAudioBuffers.clear();
+ mCallbackHandler.removeCallbacksAndMessages(null);
+ }
+ // TODO implement this for surface buffers.
+ }
+
+ /**
+ * Get current playback position.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion. It contains the media time and system timestamp of an anchor frame
+ * ({@link MediaTimestamp#mediaTimeUs} and {@link MediaTimestamp#nanoTime})
+ * and the speed of the media clock ({@link MediaTimestamp#clockRate}).
+ * <p>
+ * During regular playback, the media time moves fairly constantly (though the
+ * anchor frame may be rebased to a current system time, the linear correlation stays
+ * steady). Therefore, this method does not need to be called often.
+ * <p>
+ * To help users to get current playback position, this method always returns the timestamp of
+ * just-rendered frame, i.e., {@link System#nanoTime} and its corresponding media time. They
+ * can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media sync has not been initialized.
+ */
+ @Nullable
+ public MediaTimestamp getTimestamp()
+ {
+ try {
+ // TODO: create the timestamp in native
+ MediaTimestamp timestamp = new MediaTimestamp();
+ if (native_getTimestamp(timestamp)) {
+ return timestamp;
+ } else {
+ return null;
+ }
+ } catch (IllegalStateException e) {
+ return null;
+ }
+ }
+
+ private native final boolean native_getTimestamp(@NonNull MediaTimestamp timestamp);
+
+ /**
+ * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode).
+ * @param audioData the buffer that holds the data to play. This buffer will be returned
+ * to the client via registered callback.
+ * @param bufferIndex the buffer index used to identify audioData. It will be returned to
+ * the client along with audioData. This helps applications to keep track of audioData.
+ * @param sizeInBytes number of bytes to queue.
+ * @param presentationTimeUs the presentation timestamp in microseconds for the first frame
+ * in the buffer.
+ * @throws IllegalStateException if audio track is not configured or internal configuration
+ * has not been done correctly.
+ */
+ public void queueAudio(
+ @NonNull ByteBuffer audioData, int bufferIndex, int sizeInBytes,
+ long presentationTimeUs) {
+ if (mAudioTrack == null || mAudioThread == null) {
+ throw new IllegalStateException(
+ "AudioTrack is NOT configured or audio thread is not created");
+ }
+
+ synchronized(mAudioLock) {
+ mAudioBuffers.add(new AudioBuffer(
+ audioData, bufferIndex, sizeInBytes, presentationTimeUs));
+ }
+
+ if (mPlaybackRate != 0.0) {
+ postRenderAudio(0);
+ }
+ }
+
+ // When called on user thread, make sure to check mAudioThread != null.
+ private void postRenderAudio(long delayMillis) {
+ mAudioHandler.postDelayed(new Runnable() {
+ public void run() {
+ synchronized(mAudioLock) {
+ if (mPlaybackRate == 0.0) {
+ return;
+ }
+
+ if (mAudioBuffers.isEmpty()) {
+ return;
+ }
+
+ AudioBuffer audioBuffer = mAudioBuffers.get(0);
+ int sizeWritten = mAudioTrack.write(
+ audioBuffer.mByteBuffer,
+ audioBuffer.mSizeInBytes,
+ AudioTrack.WRITE_NON_BLOCKING);
+ if (sizeWritten > 0) {
+ if (audioBuffer.mPresentationTimeUs != -1) {
+ native_updateQueuedAudioData(
+ audioBuffer.mSizeInBytes, audioBuffer.mPresentationTimeUs);
+ audioBuffer.mPresentationTimeUs = -1;
+ }
+
+ if (sizeWritten == audioBuffer.mSizeInBytes) {
+ postReturnByteBuffer(audioBuffer);
+ mAudioBuffers.remove(0);
+ if (!mAudioBuffers.isEmpty()) {
+ postRenderAudio(0);
+ }
+ return;
+ }
+
+ audioBuffer.mSizeInBytes -= sizeWritten;
+ }
+ long pendingTimeMs = TimeUnit.MICROSECONDS.toMillis(
+ native_getPlayTimeForPendingAudioFrames());
+ postRenderAudio(pendingTimeMs / 2);
+ }
+ }
+ }, delayMillis);
+ }
+
+ private native final void native_updateQueuedAudioData(
+ int sizeInBytes, long presentationTimeUs);
+
+ private native final long native_getPlayTimeForPendingAudioFrames();
+
+ private final void postReturnByteBuffer(@NonNull final AudioBuffer audioBuffer) {
+ synchronized(mCallbackLock) {
+ if (mCallbackHandler != null) {
+ final MediaSync sync = this;
+ mCallbackHandler.post(new Runnable() {
+ public void run() {
+ synchronized(mCallbackLock) {
+ if (mCallbackHandler == null
+ || mCallbackHandler.getLooper().getThread()
+ != Thread.currentThread()) {
+ // callback handler has been changed.
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onAudioBufferConsumed(sync, audioBuffer.mByteBuffer,
+ audioBuffer.mBufferIndex);
+ }
+ }
+ }
+ });
+ }
+ }
+ }
+
+ private final void returnAudioBuffers() {
+ synchronized(mAudioLock) {
+ for (AudioBuffer audioBuffer: mAudioBuffers) {
+ postReturnByteBuffer(audioBuffer);
+ }
+ mAudioBuffers.clear();
+ }
+ }
+
+ private void createAudioThread() {
+ mAudioThread = new Thread() {
+ @Override
+ public void run() {
+ Looper.prepare();
+ synchronized(mAudioLock) {
+ mAudioLooper = Looper.myLooper();
+ mAudioHandler = new Handler();
+ mAudioLock.notify();
+ }
+ Looper.loop();
+ }
+ };
+ mAudioThread.start();
+
+ synchronized(mAudioLock) {
+ try {
+ mAudioLock.wait();
+ } catch(InterruptedException e) {
+ }
+ }
+ }
+
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ private static native final void native_init();
+}
diff --git a/media/java/android/media/MediaTimestamp.java b/media/java/android/media/MediaTimestamp.java
new file mode 100644
index 0000000..d3d5618
--- /dev/null
+++ b/media/java/android/media/MediaTimestamp.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An immutable object that represents the linear correlation between the media time
+ * and the system time. It contains the media clock rate, together with the media timestamp
+ * of an anchor frame and the system time when that frame was presented or is committed
+ * to be presented.
+ * <p>
+ * The phrase "present" means that audio/video produced on device is detectable by an external
+ * observer off device.
+ * The time is based on the implementation's best effort, using whatever knowledge
+ * is available to the system, but cannot account for any delay unknown to the implementation.
+ * The anchor frame could be any frame, including a just-rendered frame, or even a theoretical
+ * or in-between frame, based on the source of the MediaTimestamp.
+ * When the anchor frame is a just-rendered one, the media time stands for
+ * current position of the playback or recording.
+ *
+ * @see MediaSync#getTimestamp
+ * @see MediaPlayer#getTimestamp
+ */
+public final class MediaTimestamp
+{
+ /**
+ * Media time in microseconds.
+ */
+ public final long mediaTimeUs;
+
+ /**
+ * The {@link java.lang.System#nanoTime system time} corresponding to the media time
+ * in nanoseconds.
+ */
+ public final long nanoTime;
+
+ /**
+ * The rate of the media clock in relation to the system time.
+ * It is 1.0 if media clock advances in sync with the system clock;
+ * greater than 1.0 if media clock is faster than the system clock;
+ * less than 1.0 if media clock is slower than the system clock.
+ */
+ public final float clockRate;
+
+ /** @hide */
+ MediaTimestamp(long mediaUs, long systemNs, float rate) {
+ mediaTimeUs = mediaUs;
+ nanoTime = systemNs;
+ clockRate = rate;
+ }
+
+ /** @hide */
+ MediaTimestamp() {
+ mediaTimeUs = 0;
+ nanoTime = 0;
+ clockRate = 1.0f;
+ }
+}
diff --git a/media/java/android/media/OnAudioDeviceConnectionListener.java b/media/java/android/media/OnAudioDeviceConnectionListener.java
index 4bdd4d0..71c135a 100644
--- a/media/java/android/media/OnAudioDeviceConnectionListener.java
+++ b/media/java/android/media/OnAudioDeviceConnectionListener.java
@@ -16,13 +16,16 @@
package android.media;
-import java.util.ArrayList;
-
/**
- * @hide
- * API candidate
+ * OnAudioDeviceConnectionListener defines the interface for notification listeners in the
+ * {@link AudioDevicesManager}
*/
-public abstract class OnAudioDeviceConnectionListener {
- public void onConnect(ArrayList<AudioDevicesManager.AudioDeviceInfo> devices) {}
- public void onDisconnect(ArrayList<AudioDevicesManager.AudioDeviceInfo> devices) {}
+public interface OnAudioDeviceConnectionListener {
+ /**
+ * Called by the {@link AudioDevicesManager} to indicate that an audio device has been
+ * connected or disconnected. A listener will probably call the
+ * {@link AudioDevicesManager#listDevices} method to retrieve the current list of audio
+ * devices.
+ */
+ public void onAudioDeviceConnection();
}
diff --git a/media/java/android/media/OnAudioRecordRoutingListener.java b/media/java/android/media/OnAudioRecordRoutingListener.java
new file mode 100644
index 0000000..8ff41c5
--- /dev/null
+++ b/media/java/android/media/OnAudioRecordRoutingListener.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * OnAudioRecordRoutingListener defines the interface by which applications can receive
+ * notifications of routing changes for the associated {@link AudioRecord}.
+ */
+public interface OnAudioRecordRoutingListener {
+ /**
+ * Called when the routing of an AudioRecord changes from either an explicit or
+ * policy-driven rerouting.
+ */
+ public void onAudioRecordRouting(AudioRecord audioRecord);
+}
diff --git a/media/java/android/media/OnAudioTrackRoutingListener.java b/media/java/android/media/OnAudioTrackRoutingListener.java
new file mode 100644
index 0000000..18c72ef
--- /dev/null
+++ b/media/java/android/media/OnAudioTrackRoutingListener.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * OnAudioTrackRoutingListener defines the interface by which applications can receive
+ * notifications of routing changes for the associated {@link AudioTrack}.
+ */
+public interface OnAudioTrackRoutingListener {
+ /**
+ * Called when the routing of an AudioTrack changes from either an explicit or
+ * policy-driven rerouting.
+ */
+ public void onAudioTrackRouting(AudioTrack audioTrack);
+}
diff --git a/media/java/android/media/PlaybackSettings.java b/media/java/android/media/PlaybackSettings.java
new file mode 100644
index 0000000..b2e1033
--- /dev/null
+++ b/media/java/android/media/PlaybackSettings.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import android.annotation.IntDef;
+
+/**
+ * Structure for common playback settings.
+ *
+ * Used by {@link AudioTrack} {@link AudioTrack#getPlaybackSettings()} and
+ * {@link AudioTrack#setPlaybackSettings(PlaybackSettings)}
+ * to control playback behavior.
+ * <p> <strong>audio fallback mode:</strong>
+ * select out-of-range parameter handling.
+ * <ul>
+ * <li> {@link PlaybackSettings#AUDIO_FALLBACK_MODE_DEFAULT}:
+ * System will determine best handling. </li>
+ * <li> {@link PlaybackSettings#AUDIO_FALLBACK_MODE_MUTE}:
+ * Play silence for settings normally out of range.</li>
+ * <li> {@link PlaybackSettings#AUDIO_FALLBACK_MODE_FAIL}:
+ * Return {@link java.lang.IllegalArgumentException} from
+ * <code>AudioTrack.setPlaybackSettings(PlaybackSettings)</code>.</li>
+ * </ul>
+ * <p> <strong>pitch:</strong> increases or decreases the tonal frequency of the audio content.
+ * It is expressed as a multiplicative factor, where normal pitch is 1.0f.
+ * <p> <strong>speed:</strong> increases or decreases the time to
+ * play back a set of audio or video frames.
+ * It is expressed as a multiplicative factor, where normal speed is 1.0f.
+ * <p> Different combinations of speed and pitch may be used for audio playback;
+ * some common ones:
+ * <ul>
+ * <li> <em>Pitch equals 1.0f.</em> Speed change will be done with pitch preserved,
+ * often called <em>timestretching</em>.</li>
+ * <li> <em>Pitch equals speed.</em> Speed change will be done by <em>resampling</em>,
+ * similar to {@link AudioTrack#setPlaybackRate(int)}.</li>
+ * </ul>
+ */
+public final class PlaybackSettings {
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_FALLBACK_MODE_DEFAULT,
+ AUDIO_FALLBACK_MODE_MUTE,
+ AUDIO_FALLBACK_MODE_FAIL,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioFallbackMode {}
+ public static final int AUDIO_FALLBACK_MODE_DEFAULT = 0;
+ public static final int AUDIO_FALLBACK_MODE_MUTE = 1;
+ public static final int AUDIO_FALLBACK_MODE_FAIL = 2;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_STRETCH_MODE_DEFAULT,
+ AUDIO_STRETCH_MODE_VOICE,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioStretchMode {}
+ /** @hide */
+ public static final int AUDIO_STRETCH_MODE_DEFAULT = 0;
+ /** @hide */
+ public static final int AUDIO_STRETCH_MODE_VOICE = 1;
+
+ // flags to indicate which settings are actually set
+ private static final int SET_SPEED = 1 << 0;
+ private static final int SET_PITCH = 1 << 1;
+ private static final int SET_AUDIO_FALLBACK_MODE = 1 << 2;
+ private static final int SET_AUDIO_STRETCH_MODE = 1 << 3;
+ private int mSet = 0;
+
+ // settings
+ private int mAudioFallbackMode = AUDIO_FALLBACK_MODE_DEFAULT;
+ private int mAudioStretchMode = AUDIO_STRETCH_MODE_DEFAULT;
+ private float mPitch = 1.0f;
+ private float mSpeed = 1.0f;
+
+ /**
+ * Allows defaults to be returned for properties not set.
+ * Otherwise a {@link java.lang.IllegalStateException} exception
+ * is raised when getting those properties
+ * which have defaults but have never been set.
+ * @return this <code>PlaybackSettings</code> instance.
+ */
+ public PlaybackSettings allowDefaults() {
+ mSet |= SET_AUDIO_FALLBACK_MODE | SET_AUDIO_STRETCH_MODE | SET_PITCH | SET_SPEED;
+ return this;
+ }
+
+ /**
+ * Sets the audio fallback mode.
+ * @param audioFallbackMode
+ * @return this <code>PlaybackSettings</code> instance.
+ */
+ public PlaybackSettings setAudioFallbackMode(@AudioFallbackMode int audioFallbackMode) {
+ mAudioFallbackMode = audioFallbackMode;
+ mSet |= SET_AUDIO_FALLBACK_MODE;
+ return this;
+ }
+
+ /**
+ * Retrieves the audio fallback mode.
+ * @return audio fallback mode
+ * @throws IllegalStateException if the audio fallback mode is not set.
+ */
+ public @AudioFallbackMode int getAudioFallbackMode() {
+ if ((mSet & SET_AUDIO_FALLBACK_MODE) == 0) {
+ throw new IllegalStateException("audio fallback mode not set");
+ }
+ return mAudioFallbackMode;
+ }
+
+ /**
+ * @hide
+ * Sets the audio stretch mode.
+ * @param audioStretchMode
+ * @return this <code>PlaybackSettings</code> instance.
+ */
+ public PlaybackSettings setAudioStretchMode(@AudioStretchMode int audioStretchMode) {
+ mAudioStretchMode = audioStretchMode;
+ mSet |= SET_AUDIO_STRETCH_MODE;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Retrieves the audio stretch mode.
+ * @return audio stretch mode
+ * @throws IllegalStateException if the audio stretch mode is not set.
+ */
+ public @AudioStretchMode int getAudioStretchMode() {
+ if ((mSet & SET_AUDIO_STRETCH_MODE) == 0) {
+ throw new IllegalStateException("audio stretch mode not set");
+ }
+ return mAudioStretchMode;
+ }
+
+ /**
+ * Sets the pitch factor.
+ * @param pitch
+ * @return this <code>PlaybackSettings</code> instance.
+ */
+ public PlaybackSettings setPitch(float pitch) {
+ mPitch = pitch;
+ mSet |= SET_PITCH;
+ return this;
+ }
+
+ /**
+ * Retrieves the pitch factor.
+ * @return pitch
+ * @throws IllegalStateException if pitch is not set.
+ */
+ public float getPitch() {
+ if ((mSet & SET_PITCH) == 0) {
+ throw new IllegalStateException("pitch not set");
+ }
+ return mPitch;
+ }
+
+ /**
+ * Sets the speed factor.
+ * @param speed
+ * @return this <code>PlaybackSettings</code> instance.
+ */
+ public PlaybackSettings setSpeed(float speed) {
+ mSpeed = speed;
+ mSet |= SET_SPEED;
+ return this;
+ }
+
+ /**
+ * Retrieves the speed factor.
+ * @return speed
+ * @throws IllegalStateException if speed is not set.
+ */
+ public float getSpeed() {
+ if ((mSet & SET_SPEED) == 0) {
+ throw new IllegalStateException("speed not set");
+ }
+ return mSpeed;
+ }
+}
diff --git a/media/java/android/media/SoundPool.java b/media/java/android/media/SoundPool.java
index db6b38b..88d979e 100644
--- a/media/java/android/media/SoundPool.java
+++ b/media/java/android/media/SoundPool.java
@@ -32,7 +32,6 @@ import android.os.ParcelFileDescriptor;
import android.os.Process;
import android.os.RemoteException;
import android.os.ServiceManager;
-import android.os.SystemProperties;
import android.util.AndroidRuntimeException;
import android.util.Log;
@@ -112,7 +111,24 @@ import com.android.internal.app.IAppOpsService;
* resumes.</p>
*/
public class SoundPool {
- private final SoundPoolDelegate mImpl;
+ static { System.loadLibrary("soundpool"); }
+
+ // SoundPool messages
+ //
+ // must match SoundPool.h
+ private static final int SAMPLE_LOADED = 1;
+
+ private final static String TAG = "SoundPool";
+ private final static boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+
+ private long mNativeContext; // accessed by native methods
+
+ private EventHandler mEventHandler;
+ private SoundPool.OnLoadCompleteListener mOnLoadCompleteListener;
+
+ private final Object mLock;
+ private final AudioAttributes mAttributes;
+ private final IAppOpsService mAppOps;
/**
* Constructor. Constructs a SoundPool object with the following
@@ -135,68 +151,26 @@ public class SoundPool {
}
private SoundPool(int maxStreams, AudioAttributes attributes) {
- if (SystemProperties.getBoolean("config.disable_media", false)) {
- mImpl = new SoundPoolStub();
- } else {
- mImpl = new SoundPoolImpl(this, maxStreams, attributes);
+ // do native setup
+ if (native_setup(new WeakReference<SoundPool>(this), maxStreams, attributes) != 0) {
+ throw new RuntimeException("Native setup failed");
}
+ mLock = new Object();
+ mAttributes = attributes;
+ IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
+ mAppOps = IAppOpsService.Stub.asInterface(b);
}
/**
- * Builder class for {@link SoundPool} objects.
+ * Release the SoundPool resources.
+ *
+ * Release all memory and native resources used by the SoundPool
+ * object. The SoundPool can no longer be used and the reference
+ * should be set to null.
*/
- public static class Builder {
- private int mMaxStreams = 1;
- private AudioAttributes mAudioAttributes;
+ public native final void release();
- /**
- * Constructs a new Builder with the defaults format values.
- * If not provided, the maximum number of streams is 1 (see {@link #setMaxStreams(int)} to
- * change it), and the audio attributes have a usage value of
- * {@link AudioAttributes#USAGE_MEDIA} (see {@link #setAudioAttributes(AudioAttributes)} to
- * change them).
- */
- public Builder() {
- }
-
- /**
- * Sets the maximum of number of simultaneous streams that can be played simultaneously.
- * @param maxStreams a value equal to 1 or greater.
- * @return the same Builder instance
- * @throws IllegalArgumentException
- */
- public Builder setMaxStreams(int maxStreams) throws IllegalArgumentException {
- if (maxStreams <= 0) {
- throw new IllegalArgumentException(
- "Strictly positive value required for the maximum number of streams");
- }
- mMaxStreams = maxStreams;
- return this;
- }
-
- /**
- * Sets the {@link AudioAttributes}. For examples, game applications will use attributes
- * built with usage information set to {@link AudioAttributes#USAGE_GAME}.
- * @param attributes a non-null
- * @return
- */
- public Builder setAudioAttributes(AudioAttributes attributes)
- throws IllegalArgumentException {
- if (attributes == null) {
- throw new IllegalArgumentException("Invalid null AudioAttributes");
- }
- mAudioAttributes = attributes;
- return this;
- }
-
- public SoundPool build() {
- if (mAudioAttributes == null) {
- mAudioAttributes = new AudioAttributes.Builder()
- .setUsage(AudioAttributes.USAGE_MEDIA).build();
- }
- return new SoundPool(mMaxStreams, mAudioAttributes);
- }
- }
+ protected void finalize() { release(); }
/**
* Load the sound from the specified path.
@@ -207,7 +181,19 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(String path, int priority) {
- return mImpl.load(path, priority);
+ int id = 0;
+ try {
+ File f = new File(path);
+ ParcelFileDescriptor fd = ParcelFileDescriptor.open(f,
+ ParcelFileDescriptor.MODE_READ_ONLY);
+ if (fd != null) {
+ id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
+ fd.close();
+ }
+ } catch (java.io.IOException e) {
+ Log.e(TAG, "error loading " + path);
+ }
+ return id;
}
/**
@@ -226,7 +212,17 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(Context context, int resId, int priority) {
- return mImpl.load(context, resId, priority);
+ AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
+ int id = 0;
+ if (afd != null) {
+ id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
+ try {
+ afd.close();
+ } catch (java.io.IOException ex) {
+ //Log.d(TAG, "close failed:", ex);
+ }
+ }
+ return id;
}
/**
@@ -238,7 +234,15 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(AssetFileDescriptor afd, int priority) {
- return mImpl.load(afd, priority);
+ if (afd != null) {
+ long len = afd.getLength();
+ if (len < 0) {
+ throw new AndroidRuntimeException("no length for fd");
+ }
+ return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
+ } else {
+ return 0;
+ }
}
/**
@@ -256,7 +260,7 @@ public class SoundPool {
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(FileDescriptor fd, long offset, long length, int priority) {
- return mImpl.load(fd, offset, length, priority);
+ return _load(fd, offset, length, priority);
}
/**
@@ -269,9 +273,7 @@ public class SoundPool {
* @param soundID a soundID returned by the load() function
* @return true if just unloaded, false if previously unloaded
*/
- public final boolean unload(int soundID) {
- return mImpl.unload(soundID);
- }
+ public native final boolean unload(int soundID);
/**
* Play a sound from a sound ID.
@@ -299,8 +301,10 @@ public class SoundPool {
*/
public final int play(int soundID, float leftVolume, float rightVolume,
int priority, int loop, float rate) {
- return mImpl.play(
- soundID, leftVolume, rightVolume, priority, loop, rate);
+ if (isRestricted()) {
+ leftVolume = rightVolume = 0;
+ }
+ return _play(soundID, leftVolume, rightVolume, priority, loop, rate);
}
/**
@@ -314,9 +318,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void pause(int streamID) {
- mImpl.pause(streamID);
- }
+ public native final void pause(int streamID);
/**
* Resume a playback stream.
@@ -328,9 +330,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void resume(int streamID) {
- mImpl.resume(streamID);
- }
+ public native final void resume(int streamID);
/**
* Pause all active streams.
@@ -340,9 +340,7 @@ public class SoundPool {
* are playing. It also sets a flag so that any streams that
* are playing can be resumed by calling autoResume().
*/
- public final void autoPause() {
- mImpl.autoPause();
- }
+ public native final void autoPause();
/**
* Resume all previously active streams.
@@ -350,9 +348,7 @@ public class SoundPool {
* Automatically resumes all streams that were paused in previous
* calls to autoPause().
*/
- public final void autoResume() {
- mImpl.autoResume();
- }
+ public native final void autoResume();
/**
* Stop a playback stream.
@@ -365,9 +361,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void stop(int streamID) {
- mImpl.stop(streamID);
- }
+ public native final void stop(int streamID);
/**
* Set stream volume.
@@ -381,9 +375,11 @@ public class SoundPool {
* @param leftVolume left volume value (range = 0.0 to 1.0)
* @param rightVolume right volume value (range = 0.0 to 1.0)
*/
- public final void setVolume(int streamID,
- float leftVolume, float rightVolume) {
- mImpl.setVolume(streamID, leftVolume, rightVolume);
+ public final void setVolume(int streamID, float leftVolume, float rightVolume) {
+ if (isRestricted()) {
+ return;
+ }
+ _setVolume(streamID, leftVolume, rightVolume);
}
/**
@@ -404,9 +400,7 @@ public class SoundPool {
*
* @param streamID a streamID returned by the play() function
*/
- public final void setPriority(int streamID, int priority) {
- mImpl.setPriority(streamID, priority);
- }
+ public native final void setPriority(int streamID, int priority);
/**
* Set loop mode.
@@ -419,9 +413,7 @@ public class SoundPool {
* @param streamID a streamID returned by the play() function
* @param loop loop mode (0 = no loop, -1 = loop forever)
*/
- public final void setLoop(int streamID, int loop) {
- mImpl.setLoop(streamID, loop);
- }
+ public native final void setLoop(int streamID, int loop);
/**
* Change playback rate.
@@ -435,9 +427,7 @@ public class SoundPool {
* @param streamID a streamID returned by the play() function
* @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
*/
- public final void setRate(int streamID, float rate) {
- mImpl.setRate(streamID, rate);
- }
+ public native final void setRate(int streamID, float rate);
public interface OnLoadCompleteListener {
/**
@@ -454,356 +444,137 @@ public class SoundPool {
* Sets the callback hook for the OnLoadCompleteListener.
*/
public void setOnLoadCompleteListener(OnLoadCompleteListener listener) {
- mImpl.setOnLoadCompleteListener(listener);
- }
-
- /**
- * Release the SoundPool resources.
- *
- * Release all memory and native resources used by the SoundPool
- * object. The SoundPool can no longer be used and the reference
- * should be set to null.
- */
- public final void release() {
- mImpl.release();
- }
-
- /**
- * Interface for SoundPool implementations.
- * SoundPool is statically referenced and unconditionally called from all
- * over the framework, so we can't simply omit the class or make it throw
- * runtime exceptions, as doing so would break the framework. Instead we
- * now select either a real or no-op impl object based on whether media is
- * enabled.
- *
- * @hide
- */
- /* package */ interface SoundPoolDelegate {
- public int load(String path, int priority);
- public int load(Context context, int resId, int priority);
- public int load(AssetFileDescriptor afd, int priority);
- public int load(
- FileDescriptor fd, long offset, long length, int priority);
- public boolean unload(int soundID);
- public int play(
- int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate);
- public void pause(int streamID);
- public void resume(int streamID);
- public void autoPause();
- public void autoResume();
- public void stop(int streamID);
- public void setVolume(int streamID, float leftVolume, float rightVolume);
- public void setVolume(int streamID, float volume);
- public void setPriority(int streamID, int priority);
- public void setLoop(int streamID, int loop);
- public void setRate(int streamID, float rate);
- public void setOnLoadCompleteListener(OnLoadCompleteListener listener);
- public void release();
- }
-
-
- /**
- * Real implementation of the delegate interface. This was formerly the
- * body of SoundPool itself.
- */
- /* package */ static class SoundPoolImpl implements SoundPoolDelegate {
- static { System.loadLibrary("soundpool"); }
-
- private final static String TAG = "SoundPool";
- private final static boolean DEBUG = false;
-
- private long mNativeContext; // accessed by native methods
-
- private EventHandler mEventHandler;
- private SoundPool.OnLoadCompleteListener mOnLoadCompleteListener;
- private SoundPool mProxy;
-
- private final Object mLock;
- private final AudioAttributes mAttributes;
- private final IAppOpsService mAppOps;
-
- // SoundPool messages
- //
- // must match SoundPool.h
- private static final int SAMPLE_LOADED = 1;
-
- public SoundPoolImpl(SoundPool proxy, int maxStreams, AudioAttributes attr) {
-
- // do native setup
- if (native_setup(new WeakReference(this), maxStreams, attr) != 0) {
- throw new RuntimeException("Native setup failed");
- }
- mLock = new Object();
- mProxy = proxy;
- mAttributes = attr;
- IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
- mAppOps = IAppOpsService.Stub.asInterface(b);
- }
-
- public int load(String path, int priority)
- {
- int id = 0;
- try {
- File f = new File(path);
- ParcelFileDescriptor fd = ParcelFileDescriptor.open(f, ParcelFileDescriptor.MODE_READ_ONLY);
- if (fd != null) {
- id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
- fd.close();
- }
- } catch (java.io.IOException e) {
- Log.e(TAG, "error loading " + path);
- }
- return id;
- }
-
- @Override
- public int load(Context context, int resId, int priority) {
- AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
- int id = 0;
- if (afd != null) {
- id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
- try {
- afd.close();
- } catch (java.io.IOException ex) {
- //Log.d(TAG, "close failed:", ex);
- }
- }
- return id;
- }
-
- @Override
- public int load(AssetFileDescriptor afd, int priority) {
- if (afd != null) {
- long len = afd.getLength();
- if (len < 0) {
- throw new AndroidRuntimeException("no length for fd");
+ synchronized(mLock) {
+ if (listener != null) {
+ // setup message handler
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mEventHandler = new EventHandler(looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mEventHandler = new EventHandler(looper);
+ } else {
+ mEventHandler = null;
}
- return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
} else {
- return 0;
+ mEventHandler = null;
}
+ mOnLoadCompleteListener = listener;
}
+ }
- @Override
- public int load(FileDescriptor fd, long offset, long length, int priority) {
- return _load(fd, offset, length, priority);
- }
-
- private native final int _load(FileDescriptor fd, long offset, long length, int priority);
-
- @Override
- public native final boolean unload(int soundID);
-
- @Override
- public final int play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate) {
- if (isRestricted()) {
- leftVolume = rightVolume = 0;
- }
- return _play(soundID, leftVolume, rightVolume, priority, loop, rate);
+ private boolean isRestricted() {
+ if ((mAttributes.getFlags() & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0) {
+ return false;
}
-
- public native final int _play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate);
-
- private boolean isRestricted() {
- if ((mAttributes.getFlags() & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0) {
- return false;
- }
- try {
- final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO,
- mAttributes.getUsage(),
- Process.myUid(), ActivityThread.currentPackageName());
- return mode != AppOpsManager.MODE_ALLOWED;
- } catch (RemoteException e) {
- return false;
- }
+ try {
+ final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO,
+ mAttributes.getUsage(),
+ Process.myUid(), ActivityThread.currentPackageName());
+ return mode != AppOpsManager.MODE_ALLOWED;
+ } catch (RemoteException e) {
+ return false;
}
+ }
- @Override
- public native final void pause(int streamID);
+ private native final int _load(FileDescriptor fd, long offset, long length, int priority);
- @Override
- public native final void resume(int streamID);
+ private native final int native_setup(Object weakRef, int maxStreams,
+ Object/*AudioAttributes*/ attributes);
- @Override
- public native final void autoPause();
+ private native final int _play(int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate);
- @Override
- public native final void autoResume();
+ private native final void _setVolume(int streamID, float leftVolume, float rightVolume);
- @Override
- public native final void stop(int streamID);
+ // post event from native code to message handler
+ @SuppressWarnings("unchecked")
+ private static void postEventFromNative(Object ref, int msg, int arg1, int arg2, Object obj) {
+ SoundPool soundPool = ((WeakReference<SoundPool>) ref).get();
+ if (soundPool == null)
+ return;
- @Override
- public final void setVolume(int streamID, float leftVolume, float rightVolume) {
- if (isRestricted()) {
- return;
- }
- _setVolume(streamID, leftVolume, rightVolume);
+ if (soundPool.mEventHandler != null) {
+ Message m = soundPool.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
+ soundPool.mEventHandler.sendMessage(m);
}
+ }
- private native final void _setVolume(int streamID, float leftVolume, float rightVolume);
-
- @Override
- public void setVolume(int streamID, float volume) {
- setVolume(streamID, volume, volume);
+ private final class EventHandler extends Handler {
+ public EventHandler(Looper looper) {
+ super(looper);
}
@Override
- public native final void setPriority(int streamID, int priority);
-
- @Override
- public native final void setLoop(int streamID, int loop);
-
- @Override
- public native final void setRate(int streamID, float rate);
-
- @Override
- public void setOnLoadCompleteListener(SoundPool.OnLoadCompleteListener listener)
- {
- synchronized(mLock) {
- if (listener != null) {
- // setup message handler
- Looper looper;
- if ((looper = Looper.myLooper()) != null) {
- mEventHandler = new EventHandler(mProxy, looper);
- } else if ((looper = Looper.getMainLooper()) != null) {
- mEventHandler = new EventHandler(mProxy, looper);
- } else {
- mEventHandler = null;
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case SAMPLE_LOADED:
+ if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
+ synchronized(mLock) {
+ if (mOnLoadCompleteListener != null) {
+ mOnLoadCompleteListener.onLoadComplete(SoundPool.this, msg.arg1, msg.arg2);
}
- } else {
- mEventHandler = null;
}
- mOnLoadCompleteListener = listener;
- }
- }
-
- private class EventHandler extends Handler
- {
- private SoundPool mSoundPool;
-
- public EventHandler(SoundPool soundPool, Looper looper) {
- super(looper);
- mSoundPool = soundPool;
- }
-
- @Override
- public void handleMessage(Message msg) {
- switch(msg.what) {
- case SAMPLE_LOADED:
- if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
- synchronized(mLock) {
- if (mOnLoadCompleteListener != null) {
- mOnLoadCompleteListener.onLoadComplete(mSoundPool, msg.arg1, msg.arg2);
- }
- }
- break;
- default:
- Log.e(TAG, "Unknown message type " + msg.what);
- return;
- }
- }
- }
-
- // post event from native code to message handler
- private static void postEventFromNative(Object weakRef, int msg, int arg1, int arg2, Object obj)
- {
- SoundPoolImpl soundPoolImpl = (SoundPoolImpl)((WeakReference)weakRef).get();
- if (soundPoolImpl == null)
+ break;
+ default:
+ Log.e(TAG, "Unknown message type " + msg.what);
return;
-
- if (soundPoolImpl.mEventHandler != null) {
- Message m = soundPoolImpl.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
- soundPoolImpl.mEventHandler.sendMessage(m);
}
}
-
- public native final void release();
-
- private native final int native_setup(Object weakRef, int maxStreams,
- Object/*AudioAttributes*/ attributes);
-
- protected void finalize() { release(); }
}
/**
- * No-op implementation of SoundPool.
- * Used when media is disabled by the system.
- * @hide
+ * Builder class for {@link SoundPool} objects.
*/
- /* package */ static class SoundPoolStub implements SoundPoolDelegate {
- public SoundPoolStub() { }
-
- public int load(String path, int priority) {
- return 0;
- }
-
- @Override
- public int load(Context context, int resId, int priority) {
- return 0;
- }
-
- @Override
- public int load(AssetFileDescriptor afd, int priority) {
- return 0;
- }
-
- @Override
- public int load(FileDescriptor fd, long offset, long length, int priority) {
- return 0;
- }
+ public static class Builder {
+ private int mMaxStreams = 1;
+ private AudioAttributes mAudioAttributes;
- @Override
- public final boolean unload(int soundID) {
- return true;
+ /**
+ * Constructs a new Builder with the default values.
+ * If not provided, the maximum number of streams is 1 (see {@link #setMaxStreams(int)} to
+ * change it), and the audio attributes have a usage value of
+ * {@link AudioAttributes#USAGE_MEDIA} (see {@link #setAudioAttributes(AudioAttributes)} to
+ * change them).
+ */
+ public Builder() {
}
- @Override
- public final int play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate) {
- return 0;
+ /**
+ * Sets the maximum number of streams that can be played simultaneously.
+ * @param maxStreams a value equal to 1 or greater.
+ * @return the same Builder instance
+ * @throws IllegalArgumentException
+ */
+ public Builder setMaxStreams(int maxStreams) throws IllegalArgumentException {
+ if (maxStreams <= 0) {
+ throw new IllegalArgumentException(
+ "Strictly positive value required for the maximum number of streams");
+ }
+ mMaxStreams = maxStreams;
+ return this;
}
- @Override
- public final void pause(int streamID) { }
-
- @Override
- public final void resume(int streamID) { }
-
- @Override
- public final void autoPause() { }
-
- @Override
- public final void autoResume() { }
-
- @Override
- public final void stop(int streamID) { }
-
- @Override
- public final void setVolume(int streamID,
- float leftVolume, float rightVolume) { }
-
- @Override
- public void setVolume(int streamID, float volume) {
+ /**
+ * Sets the {@link AudioAttributes}. For example, game applications will use attributes
+ * built with usage information set to {@link AudioAttributes#USAGE_GAME}.
+ * @param attributes a non-null {@link AudioAttributes} instance
+ * @return the same Builder instance
+ */
+ public Builder setAudioAttributes(AudioAttributes attributes)
+ throws IllegalArgumentException {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Invalid null AudioAttributes");
+ }
+ mAudioAttributes = attributes;
+ return this;
}
- @Override
- public final void setPriority(int streamID, int priority) { }
-
- @Override
- public final void setLoop(int streamID, int loop) { }
-
- @Override
- public final void setRate(int streamID, float rate) { }
-
- @Override
- public void setOnLoadCompleteListener(SoundPool.OnLoadCompleteListener listener) {
+ public SoundPool build() {
+ if (mAudioAttributes == null) {
+ mAudioAttributes = new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_MEDIA).build();
+ }
+ return new SoundPool(mMaxStreams, mAudioAttributes);
}
-
- @Override
- public final void release() { }
}
}
diff --git a/media/java/android/media/SyncSettings.java b/media/java/android/media/SyncSettings.java
new file mode 100644
index 0000000..9740147
--- /dev/null
+++ b/media/java/android/media/SyncSettings.java
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import android.annotation.IntDef;
+
+/**
+ * Structure for common A/V sync settings.
+ *
+ * Used by {@link MediaSync} {@link MediaSync#getSyncSettings()} and
+ * {@link MediaSync#setSyncSettings(SyncSettings)}
+ * to control A/V sync behavior.
+ * <p> <strong>audio adjust mode:</strong>
+ * select handling of audio track when changing playback speed due to sync.
+ * <ul>
+ * <li> {@link SyncSettings#AUDIO_ADJUST_MODE_DEFAULT}:
+ * System will determine best handling. </li>
+ * <li> {@link SyncSettings#AUDIO_ADJUST_MODE_STRETCH}:
+ * Change the speed of audio playback without altering its pitch.</li>
+ * <li> {@link SyncSettings#AUDIO_ADJUST_MODE_RESAMPLE}:
+ * Change the speed of audio playback by resampling the audio.</li>
+ * </ul>
+ * <p> <strong>sync source:</strong> select
+ * clock source for sync.
+ * <ul>
+ * <li> {@link SyncSettings#SYNC_SOURCE_DEFAULT}:
+ * System will determine best selection.</li>
+ * <li> {@link SyncSettings#SYNC_SOURCE_SYSTEM_CLOCK}:
+ * Use system clock for sync source.</li>
+ * <li> {@link SyncSettings#SYNC_SOURCE_AUDIO}:
+ * Use audio track for sync source.</li>
+ * <li> {@link SyncSettings#SYNC_SOURCE_VSYNC}:
+ * Synchronize media to vsync.</li>
+ * </ul>
+ * <p> <strong>tolerance:</strong> specifies the amount of allowed playback rate
+ * change to keep media in sync with the sync source. The handling of this depends
+ * on the sync source.
+ * <p> <strong>frameRate:</strong> initial hint for video frame rate. Used when
+ * sync source is vsync.
+ */
+public final class SyncSettings {
+ /** @hide */
+ @IntDef(
+ value = {
+ SYNC_SOURCE_DEFAULT,
+ SYNC_SOURCE_SYSTEM_CLOCK,
+ SYNC_SOURCE_AUDIO,
+ SYNC_SOURCE_VSYNC,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SyncSource {}
+
+ /**
+ * Use the default sync source (default). If media has video, the sync renders to a
+ * surface that directly renders to a display, and tolerance is non zero (e.g. not
+ * less than 0.001), the vsync source is used as the clock source. Otherwise, if media has
+ * audio, audio track is used. Finally, if media has no audio, system clock is used.
+ */
+ public static final int SYNC_SOURCE_DEFAULT = 0;
+
+ /**
+ * Use system monotonic clock for sync source.
+ *
+ * @see System#nanoTime
+ */
+ public static final int SYNC_SOURCE_SYSTEM_CLOCK = 1;
+
+ /**
+ * Use audio track for sync source. This requires audio data and an audio track.
+ *
+ * @see AudioTrack#getTimestamp
+ */
+ public static final int SYNC_SOURCE_AUDIO = 2;
+
+ /**
+ * Use vsync as the sync source. This requires video data and an output surface that
+ * directly renders to the display, e.g. {@link android.view.SurfaceView}
+ * <p>
+ * This mode allows smoother playback experience by adjusting the playback speed
+ * to match the vsync rate, e.g. playing 30fps content on a 59.94Hz display.
+ * When using this mode, the tolerance should be set to greater than 0 (e.g. at least
+ * 1/1000), so that the playback speed can actually be adjusted.
+ * <p>
+ * This mode can also be used to play 25fps content on a 60Hz display using
+ * a 2:3 pulldown (basically playing the content at 24fps), which results in
+ * better playback experience on most devices. In this case the tolerance should be
+ * at least (1/24).
+ *
+ * @see android.view.Choreographer.FrameCallback#doFrame
+ * @see android.view.Display#getAppVsyncOffsetNanos
+ */
+ public static final int SYNC_SOURCE_VSYNC = 3;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_ADJUST_MODE_DEFAULT,
+ AUDIO_ADJUST_MODE_STRETCH,
+ AUDIO_ADJUST_MODE_RESAMPLE,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioAdjustMode {}
+
+ /**
+ * System will determine best handling of audio for playback rate
+ * adjustments.
+ * <p>
+ * Used by default. This will make audio play faster or slower as required
+ * by the sync source without changing its pitch; however, system may fall
+ * back to some other method (e.g. change the pitch, or mute the audio) if
+ * time stretching is no longer supported for the playback rate.
+ */
+ public static final int AUDIO_ADJUST_MODE_DEFAULT = 0;
+
+ /**
+ * Time stretch audio when playback rate must be adjusted.
+ * <p>
+ * This will make audio play faster or slower as required by the sync source
+ * without changing its pitch, as long as it is supported for the playback
+ * rate.
+ *
+ * @see MediaSync#PLAYBACK_RATE_AUDIO_MODE_STRETCH
+ * @see MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_STRETCH
+ */
+ public static final int AUDIO_ADJUST_MODE_STRETCH = 1;
+
+ /**
+ * Resample audio when playback rate must be adjusted.
+ * <p>
+ * This will make audio play faster or slower as required by the sync source
+ * by changing its pitch (making it lower to play slower, and higher to play
+ * faster.)
+ *
+ * @see MediaSync#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE
+ * @see MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE
+ */
+ public static final int AUDIO_ADJUST_MODE_RESAMPLE = 2;
+
+ // flags to indicate which settings are actually set
+ private static final int SET_SYNC_SOURCE = 1 << 0;
+ private static final int SET_AUDIO_ADJUST_MODE = 1 << 1;
+ private static final int SET_TOLERANCE = 1 << 2;
+ private static final int SET_FRAME_RATE = 1 << 3;
+ private int mSet = 0;
+
+ // settings
+ private int mAudioAdjustMode = AUDIO_ADJUST_MODE_STRETCH;
+ private int mSyncSource = SYNC_SOURCE_DEFAULT;
+ private float mTolerance = 0.f;
+ private float mFrameRate = 0.f;
+
+ /**
+ * Allows defaults to be returned for properties not set.
+ * Otherwise a {@link java.lang.IllegalArgumentException} exception
+ * is raised when getting those properties
+ * which have defaults but have never been set.
+ * @return this <code>SyncSettings</code> instance.
+ */
+ public SyncSettings allowDefaults() {
+ mSet |= SET_SYNC_SOURCE | SET_AUDIO_ADJUST_MODE | SET_TOLERANCE;
+ return this;
+ }
+
+ /**
+ * Sets the audio adjust mode.
+ * @param audioAdjustMode the audio adjust mode, one of the AUDIO_ADJUST_MODE_* constants
+ * @return this <code>SyncSettings</code> instance.
+ */
+ public SyncSettings setAudioAdjustMode(@AudioAdjustMode int audioAdjustMode) {
+ mAudioAdjustMode = audioAdjustMode;
+ mSet |= SET_AUDIO_ADJUST_MODE;
+ return this;
+ }
+
+ /**
+ * Retrieves the audio adjust mode.
+ * @return audio adjust mode
+ * @throws IllegalStateException if the audio adjust mode is not set.
+ */
+ public @AudioAdjustMode int getAudioAdjustMode() {
+ if ((mSet & SET_AUDIO_ADJUST_MODE) == 0) {
+ throw new IllegalStateException("audio adjust mode not set");
+ }
+ return mAudioAdjustMode;
+ }
+
+ /**
+ * Sets the sync source.
+ * @param syncSource the sync source, one of the SYNC_SOURCE_* constants
+ * @return this <code>SyncSettings</code> instance.
+ */
+ public SyncSettings setSyncSource(@SyncSource int syncSource) {
+ mSyncSource = syncSource;
+ mSet |= SET_SYNC_SOURCE;
+ return this;
+ }
+
+ /**
+ * Retrieves the sync source.
+ * @return sync source
+ * @throws IllegalStateException if the sync source is not set.
+ */
+ public @SyncSource int getSyncSource() {
+ if ((mSet & SET_SYNC_SOURCE) == 0) {
+ throw new IllegalStateException("sync source not set");
+ }
+ return mSyncSource;
+ }
+
+ /**
+ * Sets the tolerance. The default tolerance is 0.
+ * @param tolerance A non-negative number representing
+ * the maximum deviation of the playback rate from the playback rate
+ * set. ({@code abs(actual_rate - set_rate) / set_rate})
+ * @return this <code>SyncSettings</code> instance.
+ */
+ public SyncSettings setTolerance(float tolerance) {
+ mTolerance = tolerance;
+ mSet |= SET_TOLERANCE;
+ return this;
+ }
+
+ /**
+ * Retrieves the tolerance factor.
+ * @return tolerance factor. A non-negative number representing
+ * the maximum deviation of the playback rate from the playback rate
+ * set. ({@code abs(actual_rate - set_rate) / set_rate})
+ * @throws IllegalStateException if tolerance is not set.
+ */
+ public float getTolerance() {
+ if ((mSet & SET_TOLERANCE) == 0) {
+ throw new IllegalStateException("tolerance not set");
+ }
+ return mTolerance;
+ }
+
+ /**
+ * Sets the video frame rate hint to be used. By default the frame rate is unspecified.
+ * @param frameRate A non-negative number used as an initial hint on
+ * the video frame rate to be used when using vsync as the sync source.
+ * @return this <code>SyncSettings</code> instance.
+ */
+ public SyncSettings setFrameRate(float frameRate) {
+ mFrameRate = frameRate;
+ mSet |= SET_FRAME_RATE;
+ return this;
+ }
+
+ /**
+ * Retrieves the video frame rate hint.
+ * @return the video frame rate hint. A non-negative number
+ * used as an initial hint on the video frame rate to be used
+ * when using vsync as the sync source.
+ * @throws IllegalStateException if frame rate is not set.
+ */
+ public float getFrameRate() {
+ if ((mSet & SET_FRAME_RATE) == 0) {
+ throw new IllegalStateException("frame rate not set");
+ }
+ return mFrameRate;
+ }
+
+}
diff --git a/media/java/android/media/TimedMetaData.java b/media/java/android/media/TimedMetaData.java
new file mode 100644
index 0000000..dceb050
--- /dev/null
+++ b/media/java/android/media/TimedMetaData.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+
+/**
+ * Class that embodies a piece of timed metadata, including
+ *
+ * <ul>
+ * <li> a time stamp, and </li>
+ * <li> raw uninterpreted byte-array extracted directly from the container. </li>
+ * </ul>
+ *
+ * @see MediaPlayer#setOnTimedMetaDataListener(android.media.MediaPlayer.OnTimedMetaDataListener)
+ */
+
+public class TimedMetaData {
+ private static final String TAG = "TimedMetaData";
+
+ private long mTimeUs;
+ private byte[] mRawData;
+
+ /**
+ * @hide
+ */
+ static TimedMetaData createTimedMetaDataFromParcel(Parcel parcel) {
+ return new TimedMetaData(parcel);
+ }
+
+ private TimedMetaData(Parcel parcel) {
+ if (!parseParcel(parcel)) {
+ throw new IllegalArgumentException("parseParcel() fails");
+ }
+ }
+
+ public long getTimeUs() {
+ return mTimeUs;
+ }
+
+ public byte[] getRawData() {
+ return mRawData;
+ }
+
+ private boolean parseParcel(Parcel parcel) {
+ parcel.setDataPosition(0);
+ if (parcel.dataAvail() == 0) {
+ return false;
+ }
+
+ mTimeUs = parcel.readLong();
+ mRawData = new byte[parcel.readInt()];
+ parcel.readByteArray(mRawData);
+
+ return true;
+ }
+}
diff --git a/media/java/android/media/VolumePolicy.java b/media/java/android/media/VolumePolicy.java
index 2d3376a..1d33128 100644
--- a/media/java/android/media/VolumePolicy.java
+++ b/media/java/android/media/VolumePolicy.java
@@ -19,6 +19,8 @@ package android.media;
import android.os.Parcel;
import android.os.Parcelable;
+import java.util.Objects;
+
/** @hide */
public final class VolumePolicy implements Parcelable {
public static final VolumePolicy DEFAULT = new VolumePolicy(false, false, true, 400);
@@ -53,6 +55,23 @@ public final class VolumePolicy implements Parcelable {
}
@Override
+ public int hashCode() {
+ return Objects.hash(volumeDownToEnterSilent, volumeUpToExitSilent, doNotDisturbWhenSilent,
+ vibrateToSilentDebounce);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof VolumePolicy)) return false;
+ if (o == this) return true;
+ final VolumePolicy other = (VolumePolicy) o;
+ return other.volumeDownToEnterSilent == volumeDownToEnterSilent
+ && other.volumeUpToExitSilent == volumeUpToExitSilent
+ && other.doNotDisturbWhenSilent == doNotDisturbWhenSilent
+ && other.vibrateToSilentDebounce == vibrateToSilentDebounce;
+ }
+
+ @Override
public int describeContents() {
return 0;
}
diff --git a/media/java/android/media/audiofx/AcousticEchoCanceler.java b/media/java/android/media/audiofx/AcousticEchoCanceler.java
index 4b59c88..f5f98ef 100644
--- a/media/java/android/media/audiofx/AcousticEchoCanceler.java
+++ b/media/java/android/media/audiofx/AcousticEchoCanceler.java
@@ -68,9 +68,8 @@ public class AcousticEchoCanceler extends AudioEffect {
Log.w(TAG, "not enough resources");
} catch (RuntimeException e) {
Log.w(TAG, "not enough memory");
- } finally {
- return aec;
}
+ return aec;
}
/**
diff --git a/media/java/android/media/audiofx/AutomaticGainControl.java b/media/java/android/media/audiofx/AutomaticGainControl.java
index 83eb4e9..4a6b1f3 100644
--- a/media/java/android/media/audiofx/AutomaticGainControl.java
+++ b/media/java/android/media/audiofx/AutomaticGainControl.java
@@ -68,9 +68,8 @@ public class AutomaticGainControl extends AudioEffect {
Log.w(TAG, "not enough resources");
} catch (RuntimeException e) {
Log.w(TAG, "not enough memory");
- } finally {
- return agc;
}
+ return agc;
}
/**
diff --git a/media/java/android/media/audiofx/NoiseSuppressor.java b/media/java/android/media/audiofx/NoiseSuppressor.java
index 0ea42ab..bca990f 100644
--- a/media/java/android/media/audiofx/NoiseSuppressor.java
+++ b/media/java/android/media/audiofx/NoiseSuppressor.java
@@ -70,9 +70,8 @@ public class NoiseSuppressor extends AudioEffect {
Log.w(TAG, "not enough resources");
} catch (RuntimeException e) {
Log.w(TAG, "not enough memory");
- } finally {
- return ns;
}
+ return ns;
}
/**
diff --git a/media/java/android/media/audiofx/Virtualizer.java b/media/java/android/media/audiofx/Virtualizer.java
index be5adc8..49e56bc 100644
--- a/media/java/android/media/audiofx/Virtualizer.java
+++ b/media/java/android/media/audiofx/Virtualizer.java
@@ -17,7 +17,7 @@
package android.media.audiofx;
import android.annotation.IntDef;
-import android.media.AudioDevice;
+import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.audiofx.AudioEffect;
import android.util.Log;
@@ -204,7 +204,7 @@ public class Virtualizer extends AudioEffect {
// convert channel mask to internal native representation
paramsConverter.putInt(AudioFormat.convertChannelOutMaskToNativeMask(channelMask));
// convert Java device type to internal representation
- paramsConverter.putInt(AudioDevice.convertDeviceTypeToInternalDevice(deviceType));
+ paramsConverter.putInt(AudioDeviceInfo.convertDeviceTypeToInternalDevice(deviceType));
// allocate an array to store the results
byte[] result = new byte[nbChannels * 4/*int to byte*/ * 3/*for mask, azimuth, elevation*/];
@@ -305,9 +305,9 @@ public class Virtualizer extends AudioEffect {
throws IllegalArgumentException {
switch (virtualizationMode) {
case VIRTUALIZATION_MODE_BINAURAL:
- return AudioDevice.TYPE_WIRED_HEADPHONES;
+ return AudioDeviceInfo.TYPE_WIRED_HEADPHONES;
case VIRTUALIZATION_MODE_TRANSAURAL:
- return AudioDevice.TYPE_BUILTIN_SPEAKER;
+ return AudioDeviceInfo.TYPE_BUILTIN_SPEAKER;
default:
throw (new IllegalArgumentException(
"Virtualizer: illegal virtualization mode " + virtualizationMode));
@@ -317,7 +317,7 @@ public class Virtualizer extends AudioEffect {
private static int getDeviceForModeForce(@ForceVirtualizationMode int virtualizationMode)
throws IllegalArgumentException {
if (virtualizationMode == VIRTUALIZATION_MODE_AUTO) {
- return AudioDevice.TYPE_UNKNOWN;
+ return AudioDeviceInfo.TYPE_UNKNOWN;
} else {
return getDeviceForModeQuery(virtualizationMode);
}
@@ -325,24 +325,24 @@ public class Virtualizer extends AudioEffect {
private static int deviceToMode(int deviceType) {
switch (deviceType) {
- case AudioDevice.TYPE_WIRED_HEADSET:
- case AudioDevice.TYPE_WIRED_HEADPHONES:
- case AudioDevice.TYPE_BLUETOOTH_SCO:
- case AudioDevice.TYPE_BUILTIN_EARPIECE:
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
return VIRTUALIZATION_MODE_BINAURAL;
- case AudioDevice.TYPE_BUILTIN_SPEAKER:
- case AudioDevice.TYPE_LINE_ANALOG:
- case AudioDevice.TYPE_LINE_DIGITAL:
- case AudioDevice.TYPE_BLUETOOTH_A2DP:
- case AudioDevice.TYPE_HDMI:
- case AudioDevice.TYPE_HDMI_ARC:
- case AudioDevice.TYPE_USB_DEVICE:
- case AudioDevice.TYPE_USB_ACCESSORY:
- case AudioDevice.TYPE_DOCK:
- case AudioDevice.TYPE_FM:
- case AudioDevice.TYPE_AUX_LINE:
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ case AudioDeviceInfo.TYPE_LINE_ANALOG:
+ case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_HDMI:
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ case AudioDeviceInfo.TYPE_USB_DEVICE:
+ case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+ case AudioDeviceInfo.TYPE_DOCK:
+ case AudioDeviceInfo.TYPE_FM:
+ case AudioDeviceInfo.TYPE_AUX_LINE:
return VIRTUALIZATION_MODE_TRANSAURAL;
- case AudioDevice.TYPE_UNKNOWN:
+ case AudioDeviceInfo.TYPE_UNKNOWN:
default:
return VIRTUALIZATION_MODE_OFF;
}
@@ -433,7 +433,7 @@ public class Virtualizer extends AudioEffect {
throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
// convert Java device type to internal representation
int deviceType = getDeviceForModeForce(virtualizationMode);
- int internalDevice = AudioDevice.convertDeviceTypeToInternalDevice(deviceType);
+ int internalDevice = AudioDeviceInfo.convertDeviceTypeToInternalDevice(deviceType);
int status = setParameter(PARAM_FORCE_VIRTUALIZATION_MODE, internalDevice);
@@ -470,7 +470,7 @@ public class Virtualizer extends AudioEffect {
int[] value = new int[1];
int status = getParameter(PARAM_VIRTUALIZATION_MODE, value);
if (status >= 0) {
- return deviceToMode(AudioDevice.convertInternalDeviceToDeviceType(value[0]));
+ return deviceToMode(AudioDeviceInfo.convertInternalDeviceToDeviceType(value[0]));
} else if (status == AudioEffect.ERROR_BAD_VALUE) {
return VIRTUALIZATION_MODE_OFF;
} else {
diff --git a/media/java/android/media/audiopolicy/AudioMix.java b/media/java/android/media/audiopolicy/AudioMix.java
index 1806662..6aa4d8a 100644
--- a/media/java/android/media/audiopolicy/AudioMix.java
+++ b/media/java/android/media/audiopolicy/AudioMix.java
@@ -36,6 +36,7 @@ public class AudioMix {
private int mRouteFlags;
private String mRegistrationId;
private int mMixType = MIX_TYPE_INVALID;
+ private int mMixState = MIX_STATE_DISABLED;
/**
* All parameters are guaranteed valid through the Builder.
@@ -48,6 +49,7 @@ public class AudioMix {
mMixType = rule.getTargetMixType();
}
+ // ROUTE_FLAG_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
/**
* An audio mix behavior where the output of the mix is sent to the original destination of
* the audio signal, i.e. an output device for an output mix, or a recording for an input mix.
@@ -62,6 +64,7 @@ public class AudioMix {
@SystemApi
public static final int ROUTE_FLAG_LOOP_BACK = 0x1 << 1;
+ // MIX_TYPE_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
/**
* @hide
* Invalid mix type, default value.
@@ -78,6 +81,39 @@ public class AudioMix {
*/
public static final int MIX_TYPE_RECORDERS = 1;
+
+ // MIX_STATE_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
+ /**
+ * @hide
+ * State of a mix before its policy is enabled.
+ */
+ @SystemApi
+ public static final int MIX_STATE_DISABLED = -1;
+ /**
+ * @hide
+ * State of a mix when there is no audio to mix.
+ */
+ @SystemApi
+ public static final int MIX_STATE_IDLE = 0;
+ /**
+ * @hide
+ * State of a mix that is actively mixing audio.
+ */
+ @SystemApi
+ public static final int MIX_STATE_MIXING = 1;
+
+ /**
+ * @hide
+ * The current mixing state.
+ * @return one of {@link #MIX_STATE_DISABLED}, {@link #MIX_STATE_IDLE},
+ * {@link #MIX_STATE_MIXING}.
+ */
+ @SystemApi
+ public int getMixState() {
+ return mMixState;
+ }
+
+
int getRouteFlags() {
return mRouteFlags;
}
diff --git a/media/java/android/media/midi/IMidiDeviceServer.aidl b/media/java/android/media/midi/IMidiDeviceServer.aidl
index 642078a..96d12fd 100644
--- a/media/java/android/media/midi/IMidiDeviceServer.aidl
+++ b/media/java/android/media/midi/IMidiDeviceServer.aidl
@@ -16,6 +16,7 @@
package android.media.midi;
+import android.media.midi.MidiDeviceInfo;
import android.os.ParcelFileDescriptor;
/** @hide */
@@ -27,4 +28,6 @@ interface IMidiDeviceServer
// connects the input port pfd to the specified output port
void connectPorts(IBinder token, in ParcelFileDescriptor pfd, int outputPortNumber);
+
+ MidiDeviceInfo getDeviceInfo();
}
diff --git a/media/java/android/media/midi/MidiDevice.java b/media/java/android/media/midi/MidiDevice.java
index 569f7c6..6b36554 100644
--- a/media/java/android/media/midi/MidiDevice.java
+++ b/media/java/android/media/midi/MidiDevice.java
@@ -34,9 +34,6 @@ import java.io.IOException;
/**
* This class is used for sending and receiving data to and from a MIDI device
* Instances of this class are created by {@link MidiManager#openDevice}.
- *
- * CANDIDATE FOR PUBLIC API
- * @hide
*/
public final class MidiDevice implements Closeable {
private static final String TAG = "MidiDevice";
@@ -49,6 +46,11 @@ public final class MidiDevice implements Closeable {
private final CloseGuard mGuard = CloseGuard.get();
+ /**
+ * This class represents a connection between the output port of one device
+ * and the input port of another. Created by {@link #connectPorts}.
+ * Close this object to terminate the connection.
+ */
public class MidiConnection implements Closeable {
private final IBinder mToken;
private final MidiInputPort mInputPort;
@@ -134,11 +136,11 @@ public final class MidiDevice implements Closeable {
/**
* Connects the supplied {@link MidiInputPort} to the output port of this device
* with the specified port number. Once the connection is made, the MidiInput port instance
- * can no longer receive data via its {@link MidiReciever.receive} method.
- * This method returns a {@link #MidiConnection} object, which can be used to close the connection
+ * can no longer receive data via its {@link MidiReceiver#onReceive} method.
+ * This method returns a {@link MidiDevice.MidiConnection} object, which can be used to close the connection
* @param inputPort the inputPort to connect
* @param outputPortNumber the port number of the output port to connect inputPort to.
- * @return {@link #MidiConnection} object if the connection is successful, or null in case of failure
+ * @return {@link MidiDevice.MidiConnection} object if the connection is successful, or null in case of failure
*/
public MidiConnection connectPorts(MidiInputPort inputPort, int outputPortNumber) {
if (outputPortNumber < 0 || outputPortNumber >= mDeviceInfo.getOutputPortCount()) {
diff --git a/media/java/android/media/midi/MidiDeviceInfo.java b/media/java/android/media/midi/MidiDeviceInfo.java
index 93e0939..af108eb 100644
--- a/media/java/android/media/midi/MidiDeviceInfo.java
+++ b/media/java/android/media/midi/MidiDeviceInfo.java
@@ -43,6 +43,11 @@ public final class MidiDeviceInfo implements Parcelable {
public static final int TYPE_VIRTUAL = 2;
/**
+ * Constant representing Bluetooth MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_BLUETOOTH = 3;
+
+ /**
* Bundle key for the device's user visible name property.
* Used with the {@link android.os.Bundle} returned by {@link #getProperties}.
* For USB devices, this is a concatenation of the manufacturer and product names.
@@ -78,6 +83,13 @@ public final class MidiDeviceInfo implements Parcelable {
public static final String PROPERTY_USB_DEVICE = "usb_device";
/**
+ * Bundle key for the device's {@link android.bluetooth.BluetoothDevice}.
+ * Only set for Bluetooth MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ */
+ public static final String PROPERTY_BLUETOOTH_DEVICE = "bluetooth_device";
+
+ /**
* Bundle key for the device's ALSA card number.
* Only set for USB MIDI devices.
* Used with the {@link android.os.Bundle} returned by {@link #getProperties}
@@ -225,29 +237,23 @@ public final class MidiDeviceInfo implements Parcelable {
}
/**
- * Returns information about an input port.
+ * Returns information about the device's ports.
+ * The ports are in unspecified order.
*
- * @param portNumber the number of the input port
- * @return the input port's information object
+ * @return array of {@link PortInfo}
*/
- public PortInfo getInputPortInfo(int portNumber) {
- if (portNumber < 0 || portNumber >= mInputPortCount) {
- throw new IllegalArgumentException("portNumber out of range");
- }
- return new PortInfo(PortInfo.TYPE_INPUT, portNumber, mInputPortNames[portNumber]);
- }
+ public PortInfo[] getPortList() {
+ PortInfo[] portInfoList = new PortInfo[mInputPortCount + mOutputPortCount];
- /**
- * Returns information about an output port.
- *
- * @param portNumber the number of the output port
- * @return the output port's information object
- */
- public PortInfo getOutputPortInfo(int portNumber) {
- if (portNumber < 0 || portNumber >= mOutputPortCount) {
- throw new IllegalArgumentException("portNumber out of range");
+ int index = 0;
+ for (int i = 0; i < mInputPortCount; i++) {
+ portInfoList[index++] = new PortInfo(PortInfo.TYPE_INPUT, i, mInputPortNames[i]);
+ }
+ for (int i = 0; i < mOutputPortCount; i++) {
+ portInfoList[index++] = new PortInfo(PortInfo.TYPE_OUTPUT, i, mOutputPortNames[i]);
}
- return new PortInfo(PortInfo.TYPE_OUTPUT, portNumber, mOutputPortNames[portNumber]);
+
+ return portInfoList;
}
/**
diff --git a/media/java/android/media/midi/MidiDeviceServer.java b/media/java/android/media/midi/MidiDeviceServer.java
index d27351f..a316a44 100644
--- a/media/java/android/media/midi/MidiDeviceServer.java
+++ b/media/java/android/media/midi/MidiDeviceServer.java
@@ -24,6 +24,8 @@ import android.os.RemoteException;
import android.system.OsConstants;
import android.util.Log;
+import com.android.internal.midi.MidiDispatcher;
+
import dalvik.system.CloseGuard;
import libcore.io.IoUtils;
@@ -250,6 +252,11 @@ public final class MidiDeviceServer implements Closeable {
mPortClients.put(token, client);
}
}
+
+ @Override
+ public MidiDeviceInfo getDeviceInfo() {
+ return mDeviceInfo;
+ }
};
/* package */ MidiDeviceServer(IMidiManager midiManager, MidiReceiver[] inputPortReceivers,
@@ -277,6 +284,10 @@ public final class MidiDeviceServer implements Closeable {
return mServer;
}
+ public IBinder asBinder() {
+ return mServer.asBinder();
+ }
+
/* package */ void setDeviceInfo(MidiDeviceInfo deviceInfo) {
if (mDeviceInfo != null) {
throw new IllegalStateException("setDeviceInfo should only be called once");
diff --git a/media/java/android/media/midi/MidiDeviceService.java b/media/java/android/media/midi/MidiDeviceService.java
index 8b1de3e..ce12a4f 100644
--- a/media/java/android/media/midi/MidiDeviceService.java
+++ b/media/java/android/media/midi/MidiDeviceService.java
@@ -91,7 +91,7 @@ abstract public class MidiDeviceService extends Service {
/**
* Returns an array of {@link MidiReceiver} for the device's input ports.
* Subclasses must override this to provide the receivers which will receive
- * data sent to the device's input ports. An empty array or null should be returned if
+ * data sent to the device's input ports. An empty array should be returned if
* the device has no input ports.
* @return array of MidiReceivers
*/
diff --git a/media/java/android/media/midi/MidiDispatcher.java b/media/java/android/media/midi/MidiDispatcher.java
deleted file mode 100644
index 0868346..0000000
--- a/media/java/android/media/midi/MidiDispatcher.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.midi;
-
-import java.io.IOException;
-import java.util.concurrent.CopyOnWriteArrayList;
-
-/**
- * Utility class for dispatching MIDI data to a list of {@link MidiReceiver}s.
- * This class subclasses {@link MidiReceiver} and dispatches any data it receives
- * to its receiver list. Any receivers that throw an exception upon receiving data will
- * be automatically removed from the receiver list, but no IOException will be returned
- * from the dispatcher's {@link #onReceive} in that case.
- *
- * @hide
- */
-public final class MidiDispatcher extends MidiReceiver {
-
- private final CopyOnWriteArrayList<MidiReceiver> mReceivers
- = new CopyOnWriteArrayList<MidiReceiver>();
-
- private final MidiSender mSender = new MidiSender() {
- /**
- * Called to connect a {@link MidiReceiver} to the sender
- *
- * @param receiver the receiver to connect
- */
- public void connect(MidiReceiver receiver) {
- mReceivers.add(receiver);
- }
-
- /**
- * Called to disconnect a {@link MidiReceiver} from the sender
- *
- * @param receiver the receiver to disconnect
- */
- public void disconnect(MidiReceiver receiver) {
- mReceivers.remove(receiver);
- }
- };
-
- /**
- * Returns the number of {@link MidiReceiver}s this dispatcher contains.
- * @return the number of receivers
- */
- public int getReceiverCount() {
- return mReceivers.size();
- }
-
- /**
- * Returns a {@link MidiSender} which is used to add and remove {@link MidiReceiver}s
- * to the dispatcher's receiver list.
- * @return the dispatcher's MidiSender
- */
- public MidiSender getSender() {
- return mSender;
- }
-
- @Override
- public void onReceive(byte[] msg, int offset, int count, long timestamp) throws IOException {
- for (MidiReceiver receiver : mReceivers) {
- try {
- receiver.sendWithTimestamp(msg, offset, count, timestamp);
- } catch (IOException e) {
- // if the receiver fails we remove the receiver but do not propagate the exception
- mReceivers.remove(receiver);
- }
- }
- }
-}
diff --git a/media/java/android/media/midi/MidiInputPort.java b/media/java/android/media/midi/MidiInputPort.java
index 1d3b37a..ff16a57 100644
--- a/media/java/android/media/midi/MidiInputPort.java
+++ b/media/java/android/media/midi/MidiInputPort.java
@@ -83,7 +83,18 @@ public final class MidiInputPort extends MidiReceiver implements Closeable {
if (mOutputStream == null) {
throw new IOException("MidiInputPort is closed");
}
- int length = MidiPortImpl.packMessage(msg, offset, count, timestamp, mBuffer);
+ int length = MidiPortImpl.packData(msg, offset, count, timestamp, mBuffer);
+ mOutputStream.write(mBuffer, 0, length);
+ }
+ }
+
+ @Override
+ public void flush() throws IOException {
+ synchronized (mBuffer) {
+ if (mOutputStream == null) {
+ throw new IOException("MidiInputPort is closed");
+ }
+ int length = MidiPortImpl.packFlush(mBuffer);
mOutputStream.write(mBuffer, 0, length);
}
}
diff --git a/media/java/android/media/midi/MidiManager.java b/media/java/android/media/midi/MidiManager.java
index 1b98ca5..0ba1744 100644
--- a/media/java/android/media/midi/MidiManager.java
+++ b/media/java/android/media/midi/MidiManager.java
@@ -16,6 +16,7 @@
package android.media.midi;
+import android.bluetooth.BluetoothDevice;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
@@ -38,13 +39,28 @@ import java.util.HashMap;
*
* {@samplecode
* MidiManager manager = (MidiManager) getSystemService(Context.MIDI_SERVICE);}
- *
- * CANDIDATE FOR PUBLIC API
- * @hide
*/
public final class MidiManager {
private static final String TAG = "MidiManager";
+ /**
+ * Intent for starting BluetoothMidiService
+ * @hide
+ */
+ public static final String BLUETOOTH_MIDI_SERVICE_INTENT =
+ "android.media.midi.BluetoothMidiService";
+
+ /**
+ * BluetoothMidiService package name
+ */
+ private static final String BLUETOOTH_MIDI_SERVICE_PACKAGE = "com.android.bluetoothmidiservice";
+
+ /**
+ * BluetoothMidiService class name
+ */
+ private static final String BLUETOOTH_MIDI_SERVICE_CLASS =
+ "com.android.bluetoothmidiservice.BluetoothMidiService";
+
private final Context mContext;
private final IMidiManager mService;
private final IBinder mToken = new Binder();
@@ -128,7 +144,7 @@ public final class MidiManager {
/**
* Called to notify when the status of a MIDI device has changed
*
- * @param device a {@link MidiDeviceStatus} for the changed device
+ * @param status a {@link MidiDeviceStatus} for the changed device
*/
public void onDeviceStatusChanged(MidiDeviceStatus status) {
}
@@ -148,6 +164,19 @@ public final class MidiManager {
}
/**
+ * Callback class used for receiving the results of {@link #openBluetoothDevice}
+ */
+ abstract public static class BluetoothOpenCallback {
+ /**
+ * Called to respond to a {@link #openBluetoothDevice} request
+ *
+ * @param bluetoothDevice the {@link android.bluetooth.BluetoothDevice} to open
+ * @param device a {@link MidiDevice} for opened device, or null if opening failed
+ */
+ abstract public void onDeviceOpened(BluetoothDevice bluetoothDevice, MidiDevice device);
+ }
+
+ /**
* @hide
*/
public MidiManager(Context context, IMidiManager service) {
@@ -217,11 +246,24 @@ public final class MidiManager {
}
}
+ private void sendBluetoothDeviceResponse(final BluetoothDevice bluetoothDevice,
+ final MidiDevice device, final BluetoothOpenCallback callback, Handler handler) {
+ if (handler != null) {
+ handler.post(new Runnable() {
+ @Override public void run() {
+ callback.onDeviceOpened(bluetoothDevice, device);
+ }
+ });
+ } else {
+ callback.onDeviceOpened(bluetoothDevice, device);
+ }
+ }
+
/**
* Opens a MIDI device for reading and writing.
*
* @param deviceInfo a {@link android.media.midi.MidiDeviceInfo} to open
- * @param callback a {@link #DeviceOpenCallback} to be called to receive the result
+ * @param callback a {@link MidiManager.DeviceOpenCallback} to be called to receive the result
* @param handler the {@link android.os.Handler Handler} that will be used for delivering
* the result. If handler is null, then the thread used for the
* callback is unspecified.
@@ -263,7 +305,7 @@ public final class MidiManager {
// return immediately to avoid calling sendOpenDeviceResponse below
return;
} else {
- Log.e(TAG, "Unable to bind service: " + intent);
+ Log.e(TAG, "Unable to bind service: " + intent);
}
}
} else {
@@ -275,6 +317,51 @@ public final class MidiManager {
sendOpenDeviceResponse(deviceInfo, device, callback, handler);
}
+ /**
+ * Opens a Bluetooth MIDI device for reading and writing.
+ *
+ * @param bluetoothDevice a {@link android.bluetooth.BluetoothDevice} to open as a MIDI device
+ * @param callback a {@link MidiManager.BluetoothOpenCallback} to be called to receive the
+ * result
+ * @param handler the {@link android.os.Handler Handler} that will be used for delivering
+ * the result. If handler is null, then the thread used for the
+ * callback is unspecified.
+ */
+ public void openBluetoothDevice(final BluetoothDevice bluetoothDevice,
+ final BluetoothOpenCallback callback, final Handler handler) {
+ Intent intent = new Intent(BLUETOOTH_MIDI_SERVICE_INTENT);
+ intent.setComponent(new ComponentName(BLUETOOTH_MIDI_SERVICE_PACKAGE,
+ BLUETOOTH_MIDI_SERVICE_CLASS));
+ intent.putExtra("device", bluetoothDevice);
+ if (!mContext.bindService(intent,
+ new ServiceConnection() {
+ @Override
+ public void onServiceConnected(ComponentName name, IBinder binder) {
+ IMidiDeviceServer server =
+ IMidiDeviceServer.Stub.asInterface(binder);
+ try {
+ // fetch MidiDeviceInfo from the server
+ MidiDeviceInfo deviceInfo = server.getDeviceInfo();
+ MidiDevice device = new MidiDevice(deviceInfo, server, mContext, this);
+ sendBluetoothDeviceResponse(bluetoothDevice, device, callback, handler);
+ } catch (RemoteException e) {
+ Log.e(TAG, "remote exception in onServiceConnected");
+ sendBluetoothDeviceResponse(bluetoothDevice, null, callback, handler);
+ }
+ }
+
+ @Override
+ public void onServiceDisconnected(ComponentName name) {
+ // FIXME - anything to do here?
+ }
+ },
+ Context.BIND_AUTO_CREATE))
+ {
+ Log.e(TAG, "Unable to bind service: " + intent);
+ sendBluetoothDeviceResponse(bluetoothDevice, null, callback, handler);
+ }
+ }
+
/** @hide */
public MidiDeviceServer createDeviceServer(MidiReceiver[] inputPortReceivers,
int numOutputPorts, String[] inputPortNames, String[] outputPortNames,
diff --git a/media/java/android/media/midi/MidiOutputPort.java b/media/java/android/media/midi/MidiOutputPort.java
index b8ed36f..7491f3c 100644
--- a/media/java/android/media/midi/MidiOutputPort.java
+++ b/media/java/android/media/midi/MidiOutputPort.java
@@ -21,6 +21,8 @@ import android.os.ParcelFileDescriptor;
import android.os.RemoteException;
import android.util.Log;
+import com.android.internal.midi.MidiDispatcher;
+
import dalvik.system.CloseGuard;
import libcore.io.IoUtils;
@@ -60,12 +62,24 @@ public final class MidiOutputPort extends MidiSender implements Closeable {
// FIXME - inform receivers here?
}
- int offset = MidiPortImpl.getMessageOffset(buffer, count);
- int size = MidiPortImpl.getMessageSize(buffer, count);
- long timestamp = MidiPortImpl.getMessageTimeStamp(buffer, count);
-
- // dispatch to all our receivers
- mDispatcher.sendWithTimestamp(buffer, offset, size, timestamp);
+ int packetType = MidiPortImpl.getPacketType(buffer, count);
+ switch (packetType) {
+ case MidiPortImpl.PACKET_TYPE_DATA: {
+ int offset = MidiPortImpl.getDataOffset(buffer, count);
+ int size = MidiPortImpl.getDataSize(buffer, count);
+ long timestamp = MidiPortImpl.getPacketTimestamp(buffer, count);
+
+ // dispatch to all our receivers
+ mDispatcher.sendWithTimestamp(buffer, offset, size, timestamp);
+ break;
+ }
+ case MidiPortImpl.PACKET_TYPE_FLUSH:
+ mDispatcher.flush();
+ break;
+ default:
+ Log.e(TAG, "Unknown packet type " + packetType);
+ break;
+ }
}
} catch (IOException e) {
// FIXME report I/O failure?
diff --git a/media/java/android/media/midi/MidiPortImpl.java b/media/java/android/media/midi/MidiPortImpl.java
index 5795045..16fc214 100644
--- a/media/java/android/media/midi/MidiPortImpl.java
+++ b/media/java/android/media/midi/MidiPortImpl.java
@@ -24,6 +24,16 @@ package android.media.midi;
private static final String TAG = "MidiPort";
/**
+ * Packet type for data packet
+ */
+ public static final int PACKET_TYPE_DATA = 1;
+
+ /**
+ * Packet type for flush packet
+ */
+ public static final int PACKET_TYPE_FLUSH = 2;
+
+ /**
* Maximum size of a packet that can pass through our ParcelFileDescriptor.
*/
public static final int MAX_PACKET_SIZE = 1024;
@@ -34,12 +44,17 @@ package android.media.midi;
private static final int TIMESTAMP_SIZE = 8;
/**
+ * Data packet overhead is timestamp size plus packet type byte
+ */
+ private static final int DATA_PACKET_OVERHEAD = TIMESTAMP_SIZE + 1;
+
+ /**
* Maximum amount of MIDI data that can be included in a packet
*/
- public static final int MAX_PACKET_DATA_SIZE = MAX_PACKET_SIZE - TIMESTAMP_SIZE;
+ public static final int MAX_PACKET_DATA_SIZE = MAX_PACKET_SIZE - DATA_PACKET_OVERHEAD;
/**
- * Utility function for packing a MIDI message to be sent through our ParcelFileDescriptor
+ * Utility function for packing MIDI data to be sent through our ParcelFileDescriptor
*
* message byte array contains variable length MIDI message.
* messageSize is size of variable length MIDI message
@@ -47,46 +62,65 @@ package android.media.midi;
* dest is buffer to pack into
* returns size of packed message
*/
- public static int packMessage(byte[] message, int offset, int size, long timestamp,
+ public static int packData(byte[] message, int offset, int size, long timestamp,
byte[] dest) {
- if (size + TIMESTAMP_SIZE > MAX_PACKET_SIZE) {
- size = MAX_PACKET_SIZE - TIMESTAMP_SIZE;
+ if (size > MAX_PACKET_DATA_SIZE) {
+ size = MAX_PACKET_DATA_SIZE;
}
- // message data goes first
- System.arraycopy(message, offset, dest, 0, size);
+ int length = 0;
+ // packet type goes first
+ dest[length++] = PACKET_TYPE_DATA;
+ // data goes next
+ System.arraycopy(message, offset, dest, length, size);
+ length += size;
// followed by timestamp
for (int i = 0; i < TIMESTAMP_SIZE; i++) {
- dest[size++] = (byte)timestamp;
+ dest[length++] = (byte)timestamp;
timestamp >>= 8;
}
- return size;
+ return length;
+ }
+
+ /**
+ * Utility function for packing a flush command to be sent through our ParcelFileDescriptor
+ */
+ public static int packFlush(byte[] dest) {
+ dest[0] = PACKET_TYPE_FLUSH;
+ return 1;
+ }
+
+ /**
+ * Returns the packet type (PACKET_TYPE_DATA or PACKET_TYPE_FLUSH)
+ */
+ public static int getPacketType(byte[] buffer, int bufferLength) {
+ return buffer[0];
}
/**
- * Utility function for unpacking a MIDI message received from our ParcelFileDescriptor
+ * Utility function for unpacking MIDI data received from our ParcelFileDescriptor
* returns the offset of the MIDI message in packed buffer
*/
- public static int getMessageOffset(byte[] buffer, int bufferLength) {
- // message is at the beginning
- return 0;
+ public static int getDataOffset(byte[] buffer, int bufferLength) {
+ // data follows packet type byte
+ return 1;
}
/**
- * Utility function for unpacking a MIDI message received from our ParcelFileDescriptor
+ * Utility function for unpacking MIDI data received from our ParcelFileDescriptor
* returns size of MIDI data in packed buffer
*/
- public static int getMessageSize(byte[] buffer, int bufferLength) {
+ public static int getDataSize(byte[] buffer, int bufferLength) {
// message length is total buffer length minus size of the timestamp
- return bufferLength - TIMESTAMP_SIZE;
+ return bufferLength - DATA_PACKET_OVERHEAD;
}
/**
- * Utility function for unpacking a MIDI message received from our ParcelFileDescriptor
+ * Utility function for unpacking MIDI data received from our ParcelFileDescriptor
* unpacks timestamp from packed buffer
*/
- public static long getMessageTimeStamp(byte[] buffer, int bufferLength) {
+ public static long getPacketTimestamp(byte[] buffer, int bufferLength) {
// timestamp is at end of the packet
int offset = bufferLength;
long timestamp = 0;
diff --git a/media/java/android/media/midi/MidiReceiver.java b/media/java/android/media/midi/MidiReceiver.java
index 6f4c266..d069075 100644
--- a/media/java/android/media/midi/MidiReceiver.java
+++ b/media/java/android/media/midi/MidiReceiver.java
@@ -42,6 +42,13 @@ abstract public class MidiReceiver {
throws IOException;
/**
+ * Instructs the receiver to discard all pending events.
+ * @throws IOException
+ */
+ public void flush() throws IOException {
+ }
+
+ /**
* Returns the maximum size of a message this receiver can receive.
* Defaults to {@link java.lang.Integer#MAX_VALUE} unless overridden.
* @return maximum message size
diff --git a/media/java/android/media/midi/package.html b/media/java/android/media/midi/package.html
new file mode 100644
index 0000000..9e7e8b1
--- /dev/null
+++ b/media/java/android/media/midi/package.html
@@ -0,0 +1,324 @@
+<html>
+<body>
+<p>Android MIDI User Guide</p>
+
+<h1 id=overview>Overview</h1>
+
+
+<p>This document describes how to use the Android MIDI API in Java.</p>
+
+<p>The Android MIDI package allows users to:</p>
+
+<ul>
+ <li> Connect a MIDI keyboard to Android to play a synthesizer or drive music apps.
+ <li> Connect alternative MIDI controllers to Android.
+ <li> Drive external MIDI synths from Android.
+ <li> Drive external peripherals, lights, show control, etc from Android.
+ <li> Generate music dynamically from games or music creation apps.
+ <li> Generate MIDI messages in one app and send them to a second app.
+ <li> Use an Android device running in <em>peripheral mode</em> as a multitouch controller connected to a laptop.
+</ul>
+
+<h2 id=the_api_features_include>The API features include:</h2>
+
+
+<ul>
+ <li> Enumeration of currently available devices. Information includes name, vendor,
+capabilities, etc.
+ <li> Provide notification when MIDI devices are plugged in or unplugged.
+ <li> Support efficient transmission of single or multiple short 1-3 byte MIDI
+messages.
+ <li> Support transmission of arbitrary length data for SysEx, etc.
+ <li> Timestamps to avoid jitter.
+ <li> Support direct connection or “patching” of devices for lower latency.
+</ul>
+
+<h2 id=transports_supported>Transports Supported</h2>
+
+
+<p>The API is “transport agnostic”. But there are several transports currently
+supported:</p>
+
+<ul>
+ <li> USB
+ <li> software routing
+ <li> BTLE
+</ul>
+
+<h1 id=android_midi_terminology>Android MIDI Terminology</h1>
+
+
+<h2 id=terminology>Terminology</h2>
+
+
+<p>A <strong>Device</strong> is a MIDI capable object that has zero or more InputPorts and OutputPorts.</p>
+
+<p>An <strong>InputPort</strong> has 16 channels and can <strong>receive</strong> MIDI messages from an OutputPort or an app.</p>
+
+<p>An <strong>OutputPort</strong> has 16 channels and can <strong>send</strong> MIDI messages to an InputPort or an app.</p>
+
+<p><strong>MidiService</strong> is a centralized process that keeps track of all devices and brokers
+communication between them.</p>
+
+<p><strong>MidiManager</strong> is a class that the application or a device manager calls to communicate with
+the MidiService.</p>
+
+<h1 id=writing_a_midi_application>Writing a MIDI Application</h1>
+
+
+<h2 id=the_midimanager>The MidiManager</h2>
+
+
+<p>The primary class for accessing the MIDI package is through the MidiManager.</p>
+
+<pre class=prettyprint>
+MidiManager m = (MidiManager)context.getSystemService(Context.MIDI_SERVICE);
+</pre>
+
+
+<h2 id=get_list_of_already_plugged_in_entities>Get List of Already Plugged In Entities</h2>
+
+
+<p>When an app starts, it can get a list of all the available MIDI devices. This
+information can be presented to a user, allowing them to choose a device.</p>
+
+<pre class=prettyprint>
+MidiDeviceInfo[] infos = m.getDeviceList();
+</pre>
+
+
+<h2 id=notification_of_midi_devices_hotplug_events>Notification of MIDI Devices HotPlug Events</h2>
+
+
+<p>The application can request notification when, for example, keyboards are
+plugged in or unplugged.</p>
+
+<pre class=prettyprint>
+m.registerDeviceCallback(new MidiManager.DeviceCallback() {
+ public void onDeviceAdded( MidiDeviceInfo info ) {
+ ...
+ }
+ public void onDeviceRemoved( MidiDeviceInfo info ) {
+ ...
+ }
+ });
+</pre>
+
+
+<h2 id=device_and_port_information>Device and Port Information</h2>
+
+
+<p>You can query the number of input and output ports.</p>
+
+<pre class=prettyprint>
+int numInputs = info.getInputPortCount();
+int numOutputs = info.getOutputPortCount();
+</pre>
+
+
+<p>Note that “input” and “output” are from the standpoint of the device. So a
+synthesizer will have an “input” port that receives messages. A keyboard will
+have an “output” port that sends messages.</p>
+
+<p>The MidiDeviceInfo has a bundle of properties.</p>
+
+<pre class=prettyprint>
+Bundle properties = info.getProperties();
+String manufacturer = properties
+ .getString(MidiDeviceInfo.PROPERTY_MANUFACTURER);
+</pre>
+
+
+<p>Other properties include PROPERTY_PRODUCT, PROPERTY_NAME,
+PROPERTY_SERIAL_NUMBER</p>
+
+<p>You can get the names of the ports from a PortInfo object.</p>
+
+<pre class=prettyprint>
+PortInfo portInfo = info.getInputPortInfo(i);
+String portName = portInfo.getName();
+</pre>
+
+
+<h2 id=open_a_midi_device>Open a MIDI Device</h2>
+
+
+<p>To access a MIDI device you need to open it first. The open is asynchronous so
+you need to provide a callback for completion. You can specify an optional
+Handler if you want the callback to occur on a specific Thread.</p>
+
+<pre class=prettyprint>
+m.openDevice(info, new MidiManager.DeviceOpenCallback() {
+ &#64;Override
+ public void onDeviceOpened(MidiDeviceInfo deviceInfo,
+ MidiDevice device) {
+ if (device == null) {
+ Log.e(TAG, "could not open " + deviceInfo);
+ } else {
+            ...
+        }
+    }}, new Handler(Looper.getMainLooper()));
+</pre>
+
+
+<h2 id=open_a_midi_input_port>Open a MIDI Input Port</h2>
+
+
+<p>If you want to send a message to a MIDI Device then you need to open an “input”
+port with exclusive access.</p>
+
+<pre class=prettyprint>
+MidiInputPort inputPort = device.openInputPort(index);
+</pre>
+
+
+<h2 id=send_a_noteon>Send a NoteOn</h2>
+
+
+<p>MIDI messages are sent as byte arrays. Here we encode a NoteOn message.</p>
+
+<pre class=prettyprint>
+byte[] buffer = new byte[64];
+int numBytes = 0;
+buffer[numBytes++] = 0x90 + channel; // note on
+buffer[numBytes++] = pitch;
+buffer[numBytes++] = velocity;
+int offset = 0;
+// send() is non-blocking
+inputPort.send(buffer, offset, numBytes);
+</pre>
+
+
+<p>Sometimes it is convenient to send MIDI messages with a timestamp. By
+scheduling events in the future we can mask scheduling jitter. Android MIDI
+timestamps are based on the monotonic nanosecond system timer. This is
+consistent with the other audio and input timers.</p>
+
+<p>Here we send a message with a timestamp 2 seconds in the future.</p>
+
+<pre class=prettyprint>
+long now = System.nanoTime();
+long future = now + (2 * 1000000000);
+inputPort.sendWithTimestamp(buffer, offset, numBytes, future);
+</pre>
+
+
+<p>If you want to cancel events that you have scheduled in the future then call
+flush().</p>
+
+<pre class=prettyprint>
+inputPort.flush(); // discard events
+</pre>
+
+
+<p>If there were any MIDI NoteOff messages left in the buffer then they will be
+discarded and you may get stuck notes. So we recommend sending “all notes off”
+after doing a flush.</p>
+
+<h2 id=receive_a_note>Receive a Note</h2>
+
+
+<p>To receive MIDI data from a device you need to extend MidiReceiver. Then
+connect your receiver to an output port of the device.</p>
+
+<pre class=prettyprint>
+class MyReceiver extends MidiReceiver {
+ public void onReceive(byte[] data, int offset,
+ int count, long timestamp) throws IOException {
+ // parse MIDI or whatever
+ }
+}
+MidiOutputPort outputPort = device.openOutputPort(index);
+outputPort.connect(new MyReceiver());
+</pre>
+
+
+<p>The data that arrives is not validated or aligned in any particular way. It is
+raw MIDI data and can contain multiple messages or partial messages. It might
+contain System Real-Time messages, which can be interleaved inside other
+messages. Some applications have their own MIDI parsers so pre-parsing the data
+would be redundant. If an application wants the data parsed and aligned then
+they can use the MidiFramer utility.</p>
+
+<h1 id=creating_a_midi_virtual_device_service>Creating a MIDI Virtual Device Service</h1>
+
+
+<p>An app can provide a MIDI Service that can be used by other apps. For example,
+an app can provide a custom synthesizer that other apps can send messages to. </p>
+
+<h2 id=manifest_files>Manifest Files</h2>
+
+
+<p>An app declares that it will function as a MIDI server in the
+AndroidManifest.xml file.</p>
+
+<pre class=prettyprint>
+&lt;service android:name="<strong>MySynthDeviceService</strong>">
+ &lt;intent-filter>
+ &lt;action android:name="android.media.midi.MidiDeviceService" />
+ &lt;/intent-filter>
+ &lt;meta-data android:name="android.media.midi.MidiDeviceService"
+ android:resource="@xml/<strong>synth_device_info</strong>" />
+&lt;/service>
+</pre>
+
+
+<p>The details of the resource in this example is stored in
+“res/xml/synth_device_info.xml”.</p>
+
+<pre class=prettyprint>
+&lt;devices>
+ &lt;device manufacturer="MyCompany" product="MidiSynthBasic">
+ &lt;input-port name="input" />
+ &lt;/device>
+&lt;/devices>
+</pre>
+
+
+<h2 id=extend_midideviceservice>Extend MidiDeviceService</h2>
+
+
+<p>You then define your server by extending android.media.midi.MidiDeviceService.
+Let’s assume you have a MySynthEngine class that extends MidiReceiver.</p>
+
+<pre class=prettyprint>
+import android.media.midi.MidiDeviceService;
+import android.media.midi.MidiDeviceStatus;
+import android.media.midi.MidiReceiver;
+
+public class MidiSynthDeviceService extends MidiDeviceService {
+ private static final String TAG = "MidiSynthDeviceService";
+ private MySynthEngine mSynthEngine = new MySynthEngine();
+ &#64;Override
+ public void onCreate() {
+ super.onCreate();
+ }
+
+ &#64;Override
+ public void onDestroy() {
+ mSynthEngine.stop();
+ super.onDestroy();
+ }
+
+ &#64;Override
+ // Declare the receivers associated with your input ports.
+ public MidiReceiver[] onGetInputPortReceivers() {
+ return new MidiReceiver[] { mSynthEngine };
+ }
+
+ /**
+ * This will get called when clients connect or disconnect.
+ * You can use it to turn on your synth only when needed.
+ */
+ &#64;Override
+ public void onDeviceStatusChanged(MidiDeviceStatus status) {
+ if (status.isInputPortOpen(0)) {
+ mSynthEngine.start();
+ } else {
+ mSynthEngine.stop();
+ }
+ }
+}
+</pre>
+</body>
+</html>
diff --git a/media/java/android/media/session/ISessionCallback.aidl b/media/java/android/media/session/ISessionCallback.aidl
index 49087b0..adb6b06 100644
--- a/media/java/android/media/session/ISessionCallback.aidl
+++ b/media/java/android/media/session/ISessionCallback.aidl
@@ -15,8 +15,8 @@
package android.media.session;
-import android.media.Rating;
import android.content.Intent;
+import android.media.Rating;
import android.net.Uri;
import android.os.Bundle;
import android.os.ResultReceiver;
@@ -30,8 +30,9 @@ oneway interface ISessionCallback {
// These callbacks are for the TransportPerformer
void onPlay();
- void onPlayFromMediaId(String uri, in Bundle extras);
+ void onPlayFromMediaId(String mediaId, in Bundle extras);
void onPlayFromSearch(String query, in Bundle extras);
+ void onPlayFromUri(in Uri uri, in Bundle extras);
void onSkipToTrack(long id);
void onPause();
void onStop();
diff --git a/media/java/android/media/session/ISessionController.aidl b/media/java/android/media/session/ISessionController.aidl
index e2d06d3..8d58a60 100644
--- a/media/java/android/media/session/ISessionController.aidl
+++ b/media/java/android/media/session/ISessionController.aidl
@@ -23,9 +23,9 @@ import android.media.Rating;
import android.media.routing.IMediaRouterDelegate;
import android.media.routing.IMediaRouterStateCallback;
import android.media.session.ISessionControllerCallback;
+import android.media.session.MediaSession;
import android.media.session.ParcelableVolumeInfo;
import android.media.session.PlaybackState;
-import android.media.session.MediaSession;
import android.net.Uri;
import android.os.Bundle;
import android.os.ResultReceiver;
@@ -55,8 +55,9 @@ interface ISessionController {
// These commands are for the TransportControls
void play();
- void playFromMediaId(String uri, in Bundle extras);
+ void playFromMediaId(String mediaId, in Bundle extras);
void playFromSearch(String string, in Bundle extras);
+ void playFromUri(in Uri uri, in Bundle extras);
void skipToQueueItem(long id);
void pause();
void stop();
diff --git a/media/java/android/media/session/MediaController.java b/media/java/android/media/session/MediaController.java
index c23a139..dd81a22 100644
--- a/media/java/android/media/session/MediaController.java
+++ b/media/java/android/media/session/MediaController.java
@@ -516,8 +516,8 @@ public final class MediaController {
}
/**
- * Callback for receiving updates on from the session. A Callback can be
- * registered using {@link #registerCallback}
+ * Callback for receiving updates from the session. A Callback can be
+ * registered using {@link #registerCallback}.
*/
public static abstract class Callback {
/**
@@ -615,9 +615,9 @@ public final class MediaController {
}
/**
- * Request that the player start playback for a specific {@link Uri}.
+ * Request that the player start playback for a specific media id.
*
- * @param mediaId The uri of the requested media.
+ * @param mediaId The id of the requested media.
* @param extras Optional extras that can include extra information about the media item
* to be played.
*/
@@ -656,6 +656,25 @@ public final class MediaController {
}
/**
+ * Request that the player start playback for a specific {@link Uri}.
+ *
+ * @param uri The URI of the requested media.
+ * @param extras Optional extras that can include extra information about the media item
+ * to be played.
+ */
+ public void playFromUri(Uri uri, Bundle extras) {
+ if (uri == null || Uri.EMPTY.equals(uri)) {
+ throw new IllegalArgumentException(
+ "You must specify a non-empty Uri for playFromUri.");
+ }
+ try {
+ mSessionBinder.playFromUri(uri, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling play(" + uri + ").", e);
+ }
+ }
+
+ /**
* Play an item with a specific id in the play queue. If you specify an
* id that is not in the play queue, the behavior is undefined.
*/
diff --git a/media/java/android/media/session/MediaSession.java b/media/java/android/media/session/MediaSession.java
index cc602c9..cee82b4 100644
--- a/media/java/android/media/session/MediaSession.java
+++ b/media/java/android/media/session/MediaSession.java
@@ -30,6 +30,7 @@ import android.media.MediaMetadata;
import android.media.Rating;
import android.media.VolumeProvider;
import android.media.routing.MediaRouter;
+import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
@@ -541,6 +542,10 @@ public final class MediaSession {
postToCallback(CallbackMessageHandler.MSG_PLAY_SEARCH, query, extras);
}
+ private void dispatchPlayFromUri(Uri uri, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PLAY_URI, uri, extras);
+ }
+
private void dispatchSkipToItem(long id) {
postToCallback(CallbackMessageHandler.MSG_SKIP_TO_ITEM, id);
}
@@ -833,6 +838,12 @@ public final class MediaSession {
}
/**
+ * Override to handle requests to play a specific media item represented by a URI.
+ */
+ public void onPlayFromUri(Uri uri, Bundle extras) {
+ }
+
+ /**
* Override to handle requests to play an item with a given id from the
* play queue.
*/
@@ -961,6 +972,14 @@ public final class MediaSession {
}
@Override
+ public void onPlayFromUri(Uri uri, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPlayFromUri(uri, extras);
+ }
+ }
+
+ @Override
public void onSkipToTrack(long id) {
MediaSession session = mMediaSession.get();
if (session != null) {
@@ -1171,6 +1190,7 @@ public final class MediaSession {
private static final int MSG_COMMAND = 15;
private static final int MSG_ADJUST_VOLUME = 16;
private static final int MSG_SET_VOLUME = 17;
+ private static final int MSG_PLAY_URI = 18;
private MediaSession.Callback mCallback;
@@ -1210,6 +1230,9 @@ public final class MediaSession {
case MSG_PLAY_SEARCH:
mCallback.onPlayFromSearch((String) msg.obj, msg.getData());
break;
+ case MSG_PLAY_URI:
+ mCallback.onPlayFromUri((Uri) msg.obj, msg.getData());
+ break;
case MSG_SKIP_TO_ITEM:
mCallback.onSkipToQueueItem((Long) msg.obj);
break;
diff --git a/media/java/android/media/session/PlaybackState.java b/media/java/android/media/session/PlaybackState.java
index 6807e7f..bbe04b5 100644
--- a/media/java/android/media/session/PlaybackState.java
+++ b/media/java/android/media/session/PlaybackState.java
@@ -126,6 +126,13 @@ public final class PlaybackState implements Parcelable {
public static final long ACTION_SKIP_TO_QUEUE_ITEM = 1 << 12;
/**
+ * Indicates this session supports the play from URI command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY_FROM_URI = 1 << 13;
+
+ /**
* This is the default playback state and indicates that no media has been
* added yet, or the performer has been reset and has no content to play.
*
@@ -353,6 +360,11 @@ public final class PlaybackState implements Parcelable {
* <li> {@link PlaybackState#ACTION_SKIP_TO_NEXT}</li>
* <li> {@link PlaybackState#ACTION_SEEK_TO}</li>
* <li> {@link PlaybackState#ACTION_SET_RATING}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_PAUSE}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_MEDIA_ID}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_SEARCH}</li>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_QUEUE_ITEM}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_URI}</li>
* </ul>
*/
public long getActions() {
@@ -868,6 +880,11 @@ public final class PlaybackState implements Parcelable {
* <li> {@link PlaybackState#ACTION_SKIP_TO_NEXT}</li>
* <li> {@link PlaybackState#ACTION_SEEK_TO}</li>
* <li> {@link PlaybackState#ACTION_SET_RATING}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_PAUSE}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_MEDIA_ID}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_SEARCH}</li>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_QUEUE_ITEM}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_URI}</li>
* </ul>
*
* @param actions The set of actions allowed.
diff --git a/media/java/android/media/tv/ITvInputClient.aidl b/media/java/android/media/tv/ITvInputClient.aidl
index 7a023d6..86c0e5d 100644
--- a/media/java/android/media/tv/ITvInputClient.aidl
+++ b/media/java/android/media/tv/ITvInputClient.aidl
@@ -40,4 +40,7 @@ oneway interface ITvInputClient {
void onContentAllowed(int seq);
void onContentBlocked(in String rating, int seq);
void onLayoutSurface(int left, int top, int right, int bottom, int seq);
+ void onTimeShiftStatusChanged(int status, int seq);
+ void onTimeShiftStartPositionChanged(long timeMs, int seq);
+ void onTimeShiftCurrentPositionChanged(long timeMs, int seq);
}
diff --git a/media/java/android/media/tv/ITvInputManager.aidl b/media/java/android/media/tv/ITvInputManager.aidl
index 21549c9..b6491d8 100644
--- a/media/java/android/media/tv/ITvInputManager.aidl
+++ b/media/java/android/media/tv/ITvInputManager.aidl
@@ -74,6 +74,12 @@ interface ITvInputManager {
void requestUnblockContent(in IBinder sessionToken, in String unblockedRating, int userId);
+ void timeShiftPause(in IBinder sessionToken, int userId);
+ void timeShiftResume(in IBinder sessionToken, int userId);
+ void timeShiftSeekTo(in IBinder sessionToken, long timeMs, int userId);
+ void timeShiftSetPlaybackRate(in IBinder sessionToken, float rate, int audioMode, int userId);
+ void timeShiftEnablePositionTracking(in IBinder sessionToken, boolean enable, int userId);
+
// For TV input hardware binding
List<TvInputHardwareInfo> getHardwareList();
ITvInputHardware acquireTvInputHardware(int deviceId, in ITvInputHardwareCallback callback,
diff --git a/media/java/android/media/tv/ITvInputSession.aidl b/media/java/android/media/tv/ITvInputSession.aidl
index 1aad2fa..a054200 100644
--- a/media/java/android/media/tv/ITvInputSession.aidl
+++ b/media/java/android/media/tv/ITvInputSession.aidl
@@ -46,4 +46,10 @@ oneway interface ITvInputSession {
void removeOverlayView();
void requestUnblockContent(in String unblockedRating);
+
+ void timeShiftPause();
+ void timeShiftResume();
+ void timeShiftSeekTo(long timeMs);
+ void timeShiftSetPlaybackRate(float rate, int audioMode);
+ void timeShiftEnablePositionTracking(boolean enable);
}
diff --git a/media/java/android/media/tv/ITvInputSessionCallback.aidl b/media/java/android/media/tv/ITvInputSessionCallback.aidl
index 063d10d..e936810 100644
--- a/media/java/android/media/tv/ITvInputSessionCallback.aidl
+++ b/media/java/android/media/tv/ITvInputSessionCallback.aidl
@@ -37,4 +37,7 @@ oneway interface ITvInputSessionCallback {
void onContentAllowed();
void onContentBlocked(in String rating);
void onLayoutSurface(int left, int top, int right, int bottom);
+ void onTimeShiftStatusChanged(int status);
+ void onTimeShiftStartPositionChanged(long timeMs);
+ void onTimeShiftCurrentPositionChanged(long timeMs);
}
diff --git a/media/java/android/media/tv/ITvInputSessionWrapper.java b/media/java/android/media/tv/ITvInputSessionWrapper.java
index 94c9690..a3442e3 100644
--- a/media/java/android/media/tv/ITvInputSessionWrapper.java
+++ b/media/java/android/media/tv/ITvInputSessionWrapper.java
@@ -57,6 +57,11 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
private static final int DO_RELAYOUT_OVERLAY_VIEW = 11;
private static final int DO_REMOVE_OVERLAY_VIEW = 12;
private static final int DO_REQUEST_UNBLOCK_CONTENT = 13;
+ private static final int DO_TIME_SHIFT_PAUSE = 14;
+ private static final int DO_TIME_SHIFT_RESUME = 15;
+ private static final int DO_TIME_SHIFT_SEEK_TO = 16;
+ private static final int DO_TIME_SHIFT_SET_PLAYBACK_RATE = 17;
+ private static final int DO_TIME_SHIFT_ENABLE_POSITION_TRACKING = 18;
private final HandlerCaller mCaller;
@@ -153,6 +158,26 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
mTvInputSessionImpl.unblockContent((String) msg.obj);
break;
}
+ case DO_TIME_SHIFT_PAUSE: {
+ mTvInputSessionImpl.timeShiftPause();
+ break;
+ }
+ case DO_TIME_SHIFT_RESUME: {
+ mTvInputSessionImpl.timeShiftResume();
+ break;
+ }
+ case DO_TIME_SHIFT_SEEK_TO: {
+ mTvInputSessionImpl.timeShiftSeekTo((Long) msg.obj);
+ break;
+ }
+ case DO_TIME_SHIFT_SET_PLAYBACK_RATE: {
+ mTvInputSessionImpl.timeShiftSetPlaybackRate((Float) msg.obj, msg.arg1);
+ break;
+ }
+ case DO_TIME_SHIFT_ENABLE_POSITION_TRACKING: {
+ mTvInputSessionImpl.timeShiftEnablePositionTracking((Boolean) msg.obj);
+ break;
+ }
default: {
Log.w(TAG, "Unhandled message code: " + msg.what);
break;
@@ -242,6 +267,34 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
DO_REQUEST_UNBLOCK_CONTENT, unblockedRating));
}
+ @Override
+ public void timeShiftPause() {
+ mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_TIME_SHIFT_PAUSE));
+ }
+
+ @Override
+ public void timeShiftResume() {
+ mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_TIME_SHIFT_RESUME));
+ }
+
+ @Override
+ public void timeShiftSeekTo(long timeMs) {
+ mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_TIME_SHIFT_SEEK_TO,
+ Long.valueOf(timeMs)));
+ }
+
+ @Override
+ public void timeShiftSetPlaybackRate(float rate, int audioMode) {
+ mCaller.executeOrSendMessage(mCaller.obtainMessageIO(DO_TIME_SHIFT_SET_PLAYBACK_RATE,
+ audioMode, Float.valueOf(rate)));
+ }
+
+ @Override
+ public void timeShiftEnablePositionTracking(boolean enable) {
+ mCaller.executeOrSendMessage(mCaller.obtainMessageO(
+ DO_TIME_SHIFT_ENABLE_POSITION_TRACKING, Boolean.valueOf(enable)));
+ }
+
private final class TvInputEventReceiver extends InputEventReceiver {
public TvInputEventReceiver(InputChannel inputChannel, Looper looper) {
super(inputChannel, looper);
diff --git a/media/java/android/media/tv/TvContentRating.java b/media/java/android/media/tv/TvContentRating.java
index 596155e..daeb1cc 100644
--- a/media/java/android/media/tv/TvContentRating.java
+++ b/media/java/android/media/tv/TvContentRating.java
@@ -16,9 +16,12 @@
package android.media.tv;
+import android.annotation.NonNull;
import android.annotation.SystemApi;
import android.text.TextUtils;
+import com.android.internal.util.Preconditions;
+
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -32,11 +35,11 @@ import java.util.Objects;
* To create a {@code TvContentRating} object, use the
* {@link #createRating TvContentRating.createRating} method with valid rating system string
* constants.
- * <p>
- * It is possible for an application to define its own content rating system by supplying a content
- * rating system definition XML resource (see example below) and declaring a broadcast receiver that
- * filters {@link TvInputManager#ACTION_QUERY_CONTENT_RATING_SYSTEMS} in its manifest.
- * </p>
+ *
+ * <p>It is possible for an application to define its own content rating system by supplying a
+ * content rating system definition XML resource (see example below) and declaring a broadcast
+ * receiver that filters {@link TvInputManager#ACTION_QUERY_CONTENT_RATING_SYSTEMS} in its manifest.
+ *
* <h3> Example: Rating system definition for the TV Parental Guidelines</h3>
* The following XML example shows how the TV Parental Guidelines in the United States can be
* defined:
@@ -120,15 +123,16 @@ import java.util.Objects;
* <rating android:name="US_TV_MA" />
* </rating-order>
* </rating-system-definition>
- * </rating-system-definitions>}</pre></p>
+ * </rating-system-definitions>}</pre>
*
* <h3>System defined rating strings</h3>
* The following strings are defined by the system to provide a standard way to create
* {@code TvContentRating} objects.
+ *
* <p>For example, to create an object that represents TV-PG rating with suggestive dialogue and
* coarse language from the TV Parental Guidelines in the United States, one can use the following
* code snippet:
- * </p>
+ *
* <pre>
* TvContentRating rating = TvContentRating.createRating(
* "com.android.tv",
@@ -779,6 +783,12 @@ public final class TvContentRating {
private final int mHashCode;
/**
+ * Rating constant denoting unrated content.
+ */
+ public static final TvContentRating UNRATED = new TvContentRating("com.android.tv", "",
+ "UNRATED", null);
+
+ /**
* Creates a {@code TvContentRating} object with predefined content rating strings.
*
* @param domain The domain string. For example, "com.android.tv".
@@ -910,20 +920,17 @@ public final class TvContentRating {
/**
* Returns {@code true} if this rating has the same main rating as the specified rating and when
* this rating's sub-ratings contain the other's.
- * <p>
- * For example, a {@code TvContentRating} object that represents TV-PG with S(Sexual content)
- * and V(Violence) contains TV-PG, TV-PG/S, TV-PG/V and itself.
- * </p>
+ *
+ * <p>For example, a {@code TvContentRating} object that represents TV-PG with
+ * S(Sexual content) and V(Violence) contains TV-PG, TV-PG/S, TV-PG/V and itself.
*
* @param rating The {@link TvContentRating} to check.
* @return {@code true} if this object contains {@code rating}, {@code false} otherwise.
* @hide
*/
@SystemApi
- public final boolean contains(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public final boolean contains(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
if (!rating.getMainRating().equals(mRating)) {
return false;
}
diff --git a/media/java/android/media/tv/TvContract.java b/media/java/android/media/tv/TvContract.java
index bc9722e..f5a6f2b 100644
--- a/media/java/android/media/tv/TvContract.java
+++ b/media/java/android/media/tv/TvContract.java
@@ -16,6 +16,7 @@
package android.media.tv;
+import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.content.ComponentName;
import android.content.ContentResolver;
@@ -30,15 +31,13 @@ import java.util.List;
import java.util.Map;
/**
- * <p>
* The contract between the TV provider and applications. Contains definitions for the supported
* URIs and columns.
- * </p>
* <h3>Overview</h3>
- * <p>
- * TvContract defines a basic database of TV content metadata such as channel and program
+ *
+ * <p>TvContract defines a basic database of TV content metadata such as channel and program
* information. The information is stored in {@link Channels} and {@link Programs} tables.
- * </p>
+ *
* <ul>
* <li>A row in the {@link Channels} table represents information about a TV channel. The data
* format can vary greatly from standard to standard or according to service provider, thus
@@ -156,7 +155,7 @@ public final class TvContract {
* @param inputId The ID of the TV input to build a channels URI for. If {@code null}, builds a
* URI for all the TV inputs.
*/
- public static final Uri buildChannelsUriForInput(String inputId) {
+ public static final Uri buildChannelsUriForInput(@Nullable String inputId) {
return buildChannelsUriForInput(inputId, false);
}
@@ -171,7 +170,8 @@ public final class TvContract {
* @hide
*/
@SystemApi
- public static final Uri buildChannelsUriForInput(String inputId, boolean browsableOnly) {
+ public static final Uri buildChannelsUriForInput(@Nullable String inputId,
+ boolean browsableOnly) {
Uri.Builder builder = Channels.CONTENT_URI.buildUpon();
if (inputId != null) {
builder.appendQueryParameter(PARAM_INPUT, inputId);
@@ -193,8 +193,8 @@ public final class TvContract {
* @hide
*/
@SystemApi
- public static final Uri buildChannelsUriForInput(String inputId, String genre,
- boolean browsableOnly) {
+ public static final Uri buildChannelsUriForInput(@Nullable String inputId,
+ @Nullable String genre, boolean browsableOnly) {
if (genre == null) {
return buildChannelsUriForInput(inputId, browsableOnly);
}
@@ -333,13 +333,12 @@ public final class TvContract {
public interface BaseTvColumns extends BaseColumns {
/**
* The name of the package that owns a row in each table.
- * <p>
- * The TV provider fills it in with the name of the package that provides the initial data
+ *
+ * <p>The TV provider fills it in with the name of the package that provides the initial data
* of that row. If the package is later uninstalled, the rows it owns are automatically
* removed from the tables.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_PACKAGE_NAME = "package_name";
}
@@ -509,181 +508,171 @@ public final class TvContract {
* is not defined for the given video format.
* @see #COLUMN_VIDEO_FORMAT
*/
+ @Nullable
public static final String getVideoResolution(String videoFormat) {
return VIDEO_FORMAT_TO_RESOLUTION_MAP.get(videoFormat);
}
/**
* The ID of the TV input service that provides this TV channel.
- * <p>
- * Use {@link #buildInputId} to build the ID.
- * </p><p>
- * This is a required field.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Use {@link #buildInputId} to build the ID.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_INPUT_ID = "input_id";
/**
* The predefined type of this TV channel.
- * <p>
- * This is primarily used to indicate which broadcast standard (e.g. ATSC, DVB or ISDB) the
- * current channel conforms to. The value should match to one of the followings:
+ *
+ * <p>This is primarily used to indicate which broadcast standard (e.g. ATSC, DVB or ISDB)
+ * the current channel conforms to. The value should match to one of the followings:
* {@link #TYPE_OTHER}, {@link #TYPE_DVB_T}, {@link #TYPE_DVB_T2}, {@link #TYPE_DVB_S},
* {@link #TYPE_DVB_S2}, {@link #TYPE_DVB_C}, {@link #TYPE_DVB_C2}, {@link #TYPE_DVB_H},
* {@link #TYPE_DVB_SH}, {@link #TYPE_ATSC_T}, {@link #TYPE_ATSC_C},
* {@link #TYPE_ATSC_M_H}, {@link #TYPE_ISDB_T}, {@link #TYPE_ISDB_TB},
* {@link #TYPE_ISDB_S}, {@link #TYPE_ISDB_C}, {@link #TYPE_1SEG}, {@link #TYPE_DTMB},
* {@link #TYPE_CMMB}, {@link #TYPE_T_DMB}, {@link #TYPE_S_DMB}
- * </p><p>
- * This is a required field.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_TYPE = "type";
/**
* The predefined service type of this TV channel.
- * <p>
- * This is primarily used to indicate whether the current channel is a regular TV channel or
- * a radio-like channel. Use the same coding for {@code service_type} in the underlying
+ *
+ * <p>This is primarily used to indicate whether the current channel is a regular TV channel
+ * or a radio-like channel. Use the same coding for {@code service_type} in the underlying
* broadcast standard if it is defined there (e.g. ATSC A/53, ETSI EN 300 468 and ARIB
* STD-B10). Otherwise use one of the followings: {@link #SERVICE_TYPE_OTHER},
* {@link #SERVICE_TYPE_AUDIO_VIDEO}, {@link #SERVICE_TYPE_AUDIO}
- * </p><p>
- * This is a required field.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_SERVICE_TYPE = "service_type";
/**
* The original network ID of this TV channel.
- * <p>
- * This is used to identify the originating delivery system, if applicable. Use the same
+ *
+ * <p>This is used to identify the originating delivery system, if applicable. Use the same
* coding for {@code original_network_id} in the underlying broadcast standard if it is
* defined there (e.g. ETSI EN 300 468/TR 101 211 and ARIB STD-B10). If channels cannot be
* globally identified by 2-tuple {{@link #COLUMN_TRANSPORT_STREAM_ID},
* {@link #COLUMN_SERVICE_ID}}, one must carefully assign a value to this field to form a
* unique 3-tuple identification {{@link #COLUMN_ORIGINAL_NETWORK_ID},
* {@link #COLUMN_TRANSPORT_STREAM_ID}, {@link #COLUMN_SERVICE_ID}} for its channels.
- * </p><p>
- * This is a required field if the channel cannot be uniquely identified by a 2-tuple
+ *
+ * <p>This is a required field if the channel cannot be uniquely identified by a 2-tuple
* {{@link #COLUMN_TRANSPORT_STREAM_ID}, {@link #COLUMN_SERVICE_ID}}.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_ORIGINAL_NETWORK_ID = "original_network_id";
/**
* The transport stream ID of this channel.
- * <p>
- * This is used to identify the Transport Stream that contains the current channel from any
- * other multiplex within a network, if applicable. Use the same coding for
+ *
+ * <p>This is used to identify the Transport Stream that contains the current channel from
+ * any other multiplex within a network, if applicable. Use the same coding for
* {@code transport_stream_id} defined in ISO/IEC 13818-1 if the channel is transmitted via
* the MPEG Transport Stream as is the case for many digital broadcast standards.
- * </p><p>
- * This is a required field if the current channel is transmitted via the MPEG Transport
+ *
+ * <p>This is a required field if the current channel is transmitted via the MPEG Transport
* Stream.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_TRANSPORT_STREAM_ID = "transport_stream_id";
/**
* The service ID of this channel.
- * <p>
- * This is used to identify the current service (roughly equivalent to channel) from any
+ *
+ * <p>This is used to identify the current service (roughly equivalent to channel) from any
* other service within the Transport Stream, if applicable. Use the same coding for
* {@code service_id} in the underlying broadcast standard if it is defined there (e.g. ETSI
* EN 300 468 and ARIB STD-B10) or {@code program_number} (which usually has the same value
* as {@code service_id}) in ISO/IEC 13818-1 if the channel is transmitted via the MPEG
* Transport Stream.
- * </p><p>
- * This is a required field if the current channel is transmitted via the MPEG Transport
+ *
+ * <p>This is a required field if the current channel is transmitted via the MPEG Transport
* Stream.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_SERVICE_ID = "service_id";
/**
* The channel number that is displayed to the user.
- * <p>
- * The format can vary depending on broadcast standard and product specification.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>The format can vary depending on broadcast standard and product specification.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DISPLAY_NUMBER = "display_number";
/**
* The channel name that is displayed to the user.
- * <p>
- * A call sign is a good candidate to use for this purpose but any name that helps the user
- * recognize the current channel will be enough. Can also be empty depending on broadcast
- * standard.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>A call sign is a good candidate to use for this purpose but any name that helps the
+ * user recognize the current channel will be enough. Can also be empty depending on
+ * broadcast standard.
+ *
+             * <p>Type: TEXT
*/
public static final String COLUMN_DISPLAY_NAME = "display_name";
/**
* The network affiliation for this TV channel.
- * <p>
- * This is used to identify a channel that is commonly called by its network affiliation
+ *
+ * <p>This is used to identify a channel that is commonly called by its network affiliation
* instead of the display name. Examples include ABC for the channel KGO-HD, FOX for the
* channel KTVU-HD and NBC for the channel KNTV-HD. Can be empty if not applicable.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_NETWORK_AFFILIATION = "network_affiliation";
/**
* The description of this TV channel.
- * <p>
- * Can be empty initially.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Can be empty initially.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DESCRIPTION = "description";
/**
* The typical video format for programs from this TV channel.
- * <p>
- * This is primarily used to filter out channels based on video format by applications. The
- * value should match one of the followings: {@link #VIDEO_FORMAT_240P},
+ *
+ * <p>This is primarily used to filter out channels based on video format by applications.
+ * The value should match one of the followings: {@link #VIDEO_FORMAT_240P},
* {@link #VIDEO_FORMAT_360P}, {@link #VIDEO_FORMAT_480I}, {@link #VIDEO_FORMAT_480P},
* {@link #VIDEO_FORMAT_576I}, {@link #VIDEO_FORMAT_576P}, {@link #VIDEO_FORMAT_720P},
* {@link #VIDEO_FORMAT_1080I}, {@link #VIDEO_FORMAT_1080P}, {@link #VIDEO_FORMAT_2160P},
* {@link #VIDEO_FORMAT_4320P}. Note that the actual video resolution of each program from a
* given channel can vary thus one should use {@link Programs#COLUMN_VIDEO_WIDTH} and
* {@link Programs#COLUMN_VIDEO_HEIGHT} to get more accurate video resolution.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
+ *
* @see #getVideoResolution
*/
public static final String COLUMN_VIDEO_FORMAT = "video_format";
/**
* The flag indicating whether this TV channel is browsable or not.
- * <p>
- * A value of 1 indicates the channel is included in the channel list that applications use
- * to browse channels, a value of 0 indicates the channel is not included in the list. If
- * not specified, this value is set to 0 (not browsable) by default.
- * </p><p>
- * Type: INTEGER (boolean)
- * </p>
+ *
+ * <p>A value of 1 indicates the channel is included in the channel list that applications
+ * use to browse channels, a value of 0 indicates the channel is not included in the list.
+ * If not specified, this value is set to 0 (not browsable) by default.
+ *
+ * <p>Type: INTEGER (boolean)
* @hide
*/
@SystemApi
@@ -691,31 +680,29 @@ public final class TvContract {
/**
* The flag indicating whether this TV channel is searchable or not.
- * <p>
- * In some regions, it is not allowed to surface search results for a given channel without
- * broadcaster's consent. This is used to impose such restriction. Channels marked with
- * "not searchable" cannot be used by other services except for the system service that
+ *
+ * <p>In some regions, it is not allowed to surface search results for a given channel
+ * without broadcaster's consent. This is used to impose such restriction. Channels marked
+ * with "not searchable" cannot be used by other services except for the system service that
* shows the TV content. A value of 1 indicates the channel is searchable and can be
* included in search results, a value of 0 indicates the channel and its TV programs are
* hidden from search. If not specified, this value is set to 1 (searchable) by default.
- * </p><p>
- * Type: INTEGER (boolean)
- * </p>
+ *
+ * <p>Type: INTEGER (boolean)
*/
public static final String COLUMN_SEARCHABLE = "searchable";
/**
* The flag indicating whether this TV channel is locked or not.
- * <p>
- * This is primarily used for alternative parental control to prevent unauthorized users
+ *
+ * <p>This is primarily used for alternative parental control to prevent unauthorized users
* from watching the current channel regardless of the content rating. A value of 1
* indicates the channel is locked and the user is required to enter passcode to unlock it
* in order to watch the current program from the channel, a value of 0 indicates the
             * channel is not locked thus the user is not prompted to enter passcode. If not specified,
* this value is set to 0 (not locked) by default.
- * </p><p>
- * Type: INTEGER (boolean)
- * </p>
+ *
+ * <p>Type: INTEGER (boolean)
* @hide
*/
@SystemApi
@@ -723,25 +710,63 @@ public final class TvContract {
/**
* Internal data used by individual TV input services.
- * <p>
- * This is internal to the provider that inserted it, and should not be decoded by other
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
* apps.
- * </p><p>
- * Type: BLOB
- * </p>
+ *
+ * <p>Type: BLOB
*/
public static final String COLUMN_INTERNAL_PROVIDER_DATA = "internal_provider_data";
/**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG1 = "internal_provider_flag1";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG2 = "internal_provider_flag2";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG3 = "internal_provider_flag3";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG4 = "internal_provider_flag4";
+
+ /**
* The version number of this row entry used by TV input services.
- * <p>
- * This is best used by sync adapters to identify the rows to update. The number can be
+ *
+ * <p>This is best used by sync adapters to identify the rows to update. The number can be
* defined by individual TV input services. One may assign the same value as
* {@code version_number} that appears in ETSI EN 300 468 or ATSC A/65, if the data are
* coming from a TV broadcast.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VERSION_NUMBER = "version_number";
@@ -749,18 +774,18 @@ public final class TvContract {
/**
* A sub-directory of a single TV channel that represents its primary logo.
- * <p>
- * To access this directory, append {@link Channels.Logo#CONTENT_DIRECTORY} to the raw
+ *
+ * <p>To access this directory, append {@link Channels.Logo#CONTENT_DIRECTORY} to the raw
* channel URI. The resulting URI represents an image file, and should be interacted
* using ContentResolver.openAssetFileDescriptor.
- * </p><p>
- * Note that this sub-directory also supports opening the logo as an asset file in write
+ *
+ * <p>Note that this sub-directory also supports opening the logo as an asset file in write
* mode. Callers can create or replace the primary logo associated with this channel by
* opening the asset file and writing the full-size photo contents into it. (Make sure there
* is no padding around the logo image.) When the file is closed, the image will be parsed,
* sized down if necessary, and stored.
- * </p><p>
- * Usage example:
+ *
+ * <p>Usage example:
* <pre>
* public void writeChannelLogo(long channelId, byte[] logo) {
* Uri channelLogoUri = TvContract.buildChannelLogoUri(channelId);
@@ -776,7 +801,6 @@ public final class TvContract {
* }
* }
* </pre>
- * </p>
*/
public static final class Logo {
@@ -789,7 +813,12 @@ public final class TvContract {
}
}
- /** Column definitions for the TV programs table. */
+ /**
+ * Column definitions for the TV programs table.
+ *
+ * <p>By default, the query results will be sorted by
+ * {@link Programs#COLUMN_START_TIME_UTC_MILLIS} in ascending order.
+ */
public static final class Programs implements BaseTvColumns {
/** The content:// style URI for this table. */
@@ -804,166 +833,153 @@ public final class TvContract {
/**
* The ID of the TV channel that provides this TV program.
- * <p>
- * This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
- * </p><p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_CHANNEL_ID = "channel_id";
/**
* The title of this TV program.
- * <p>
- * If this program is an episodic TV show, it is recommended that the title is the series
+ *
+ * <p>If this program is an episodic TV show, it is recommended that the title is the series
* title and its related fields ({@link #COLUMN_SEASON_NUMBER},
* {@link #COLUMN_EPISODE_NUMBER}, and {@link #COLUMN_EPISODE_TITLE}) are filled in.
- * </p><p>
- * Type: TEXT
- * </p>
- **/
+ *
+ * <p>Type: TEXT
+ */
public static final String COLUMN_TITLE = "title";
/**
* The season number of this TV program for episodic TV shows.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: INTEGER
- * </p>
- **/
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
public static final String COLUMN_SEASON_NUMBER = "season_number";
/**
* The episode number of this TV program for episodic TV shows.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: INTEGER
- * </p>
- **/
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
public static final String COLUMN_EPISODE_NUMBER = "episode_number";
/**
* The episode title of this TV program for episodic TV shows.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: TEXT
- * </p>
- **/
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
public static final String COLUMN_EPISODE_TITLE = "episode_title";
/**
* The start time of this TV program, in milliseconds since the epoch.
- * <p>
- * The value should be equal to or larger than {@link #COLUMN_END_TIME_UTC_MILLIS} of the
+ *
+ * <p>The value should be equal to or larger than {@link #COLUMN_END_TIME_UTC_MILLIS} of the
* previous program in the same channel.
- * </p><p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
/**
* The end time of this TV program, in milliseconds since the epoch.
- * <p>
- * The value should be equal to or less than {@link #COLUMN_START_TIME_UTC_MILLIS} of the
+ *
+ * <p>The value should be equal to or less than {@link #COLUMN_START_TIME_UTC_MILLIS} of the
* next program in the same channel.
- * </p><p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
/**
* The comma-separated genre string of this TV program.
- * <p>
- * Use the same language appeared in the underlying broadcast standard, if applicable. (For
- * example, one can refer to the genre strings used in Genre Descriptor of ATSC A/65 or
+ *
+             * <p>Use the same language as appears in the underlying broadcast standard,
+ * (For example, one can refer to the genre strings used in Genre Descriptor of ATSC A/65 or
* Content Descriptor of ETSI EN 300 468, if appropriate.) Otherwise, leave empty.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_BROADCAST_GENRE = "broadcast_genre";
/**
* The comma-separated canonical genre string of this TV program.
- * <p>
- * Canonical genres are defined in {@link Genres}. Use {@link Genres#encode Genres.encode()}
- * to create a text that can be stored in this column. Use {@link Genres#decode
- * Genres.decode()} to get the canonical genre strings from the text stored in this column.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Canonical genres are defined in {@link Genres}. Use
+ * {@link Genres#encode Genres.encode()} to create a text that can be stored in this column.
+ * Use {@link Genres#decode Genres.decode()} to get the canonical genre strings from the
+ * text stored in this column.
+ *
+ * <p>Type: TEXT
* @see Genres
*/
public static final String COLUMN_CANONICAL_GENRE = "canonical_genre";
/**
* The short description of this TV program that is displayed to the user by default.
- * <p>
- * It is recommended to limit the length of the descriptions to 256 characters.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>It is recommended to limit the length of the descriptions to 256 characters.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_SHORT_DESCRIPTION = "short_description";
/**
* The detailed, lengthy description of this TV program that is displayed only when the user
* wants to see more information.
- * <p>
- * TV input services should leave this field empty if they have no additional details beyond
- * {@link #COLUMN_SHORT_DESCRIPTION}.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>TV input services should leave this field empty if they have no additional details
+ * beyond {@link #COLUMN_SHORT_DESCRIPTION}.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_LONG_DESCRIPTION = "long_description";
/**
* The width of the video for this TV program, in the unit of pixels.
- * <p>
- * Together with {@link #COLUMN_VIDEO_HEIGHT} this is used to determine the video resolution
- * of the current TV program. Can be empty if it is not known initially or the program does
- * not convey any video such as the programs from type {@link Channels#SERVICE_TYPE_AUDIO}
- * channels.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Together with {@link #COLUMN_VIDEO_HEIGHT} this is used to determine the video
+ * resolution of the current TV program. Can be empty if it is not known initially or the
+ * program does not convey any video such as the programs from type
+ * {@link Channels#SERVICE_TYPE_AUDIO} channels.
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VIDEO_WIDTH = "video_width";
/**
* The height of the video for this TV program, in the unit of pixels.
- * <p>
- * Together with {@link #COLUMN_VIDEO_WIDTH} this is used to determine the video resolution
- * of the current TV program. Can be empty if it is not known initially or the program does
- * not convey any video such as the programs from type {@link Channels#SERVICE_TYPE_AUDIO}
- * channels.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Together with {@link #COLUMN_VIDEO_WIDTH} this is used to determine the video
+ * resolution of the current TV program. Can be empty if it is not known initially or the
+ * program does not convey any video such as the programs from type
+ * {@link Channels#SERVICE_TYPE_AUDIO} channels.
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VIDEO_HEIGHT = "video_height";
/**
* The comma-separated audio languages of this TV program.
- * <p>
- * This is used to describe available audio languages included in the program. Use either
+ *
+ * <p>This is used to describe available audio languages included in the program. Use either
* ISO 639-1 or 639-2/T codes.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_AUDIO_LANGUAGE = "audio_language";
/**
* The comma-separated content ratings of this TV program.
- * <p>
- * This is used to describe the content rating(s) of this program. Each comma-separated
+ *
+ * <p>This is used to describe the content rating(s) of this program. Each comma-separated
* content rating sub-string should be generated by calling
* {@link TvContentRating#flattenToString}. Note that in most cases the program content is
* rated by a single rating system, thus resulting in a corresponding single sub-string that
@@ -972,53 +988,88 @@ public final class TvContract {
* specified as "blocked rating" in the user's parental control settings, the TV input
* service should block the current content and wait for the signal that it is okay to
* unblock.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_CONTENT_RATING = "content_rating";
/**
* The URI for the poster art of this TV program.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_POSTER_ART_URI = "poster_art_uri";
/**
* The URI for the thumbnail of this TV program.
- * <p>
- * Can be empty.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_THUMBNAIL_URI = "thumbnail_uri";
/**
* Internal data used by individual TV input services.
- * <p>
- * This is internal to the provider that inserted it, and should not be decoded by other
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
* apps.
- * </p><p>
- * Type: BLOB
- * </p>
+ *
+ * <p>Type: BLOB
*/
public static final String COLUMN_INTERNAL_PROVIDER_DATA = "internal_provider_data";
/**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG1 = "internal_provider_flag1";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG2 = "internal_provider_flag2";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG3 = "internal_provider_flag3";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG4 = "internal_provider_flag4";
+
+ /**
* The version number of this row entry used by TV input services.
- * <p>
- * This is best used by sync adapters to identify the rows to update. The number can be
+ *
+ * <p>This is best used by sync adapters to identify the rows to update. The number can be
* defined by individual TV input services. One may assign the same value as
* {@code version_number} in ETSI EN 300 468 or ATSC A/65, if the data are coming from a TV
* broadcast.
- * </p><p>
- * Type: INTEGER
- * </p>
+ *
+ * <p>Type: INTEGER
*/
public static final String COLUMN_VERSION_NUMBER = "version_number";
@@ -1145,6 +1196,9 @@ public final class TvContract {
/**
* Column definitions for the TV programs that the user watched. Applications do not have access
* to this table.
+ *
+ * <p>By default, the query results will be sorted by
+ * {@link WatchedPrograms#COLUMN_WATCH_START_TIME_UTC_MILLIS} in descending order.
* @hide
*/
@SystemApi
@@ -1163,9 +1217,8 @@ public final class TvContract {
/**
* The UTC time that the user started watching this TV program, in milliseconds since the
* epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_WATCH_START_TIME_UTC_MILLIS =
"watch_start_time_utc_millis";
@@ -1173,49 +1226,43 @@ public final class TvContract {
/**
* The UTC time that the user stopped watching this TV program, in milliseconds since the
* epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_WATCH_END_TIME_UTC_MILLIS = "watch_end_time_utc_millis";
/**
* The ID of the TV channel that provides this TV program.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_CHANNEL_ID = "channel_id";
/**
* The title of this TV program.
- * <p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_TITLE = "title";
/**
* The start time of this TV program, in milliseconds since the epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
/**
* The end time of this TV program, in milliseconds since the epoch.
- * <p>
- * Type: INTEGER (long)
- * </p>
+ *
+ * <p>Type: INTEGER (long)
*/
public static final String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
/**
* The description of this TV program.
- * <p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_DESCRIPTION = "description";
@@ -1223,25 +1270,23 @@ public final class TvContract {
* Extra parameters given to {@link TvInputService.Session#tune(Uri, android.os.Bundle)
* TvInputService.Session.tune(Uri, android.os.Bundle)} when tuning to the channel that
* provides this TV program. (Used internally.)
- * <p>
- * This column contains an encoded string that represents comma-separated key-value pairs of
+ *
+ * <p>This column contains an encoded string that represents comma-separated key-value pairs of
* the tune parameters. (Ex. "[key1]=[value1], [key2]=[value2]"). '%' is used as an escape
* character for '%', '=', and ','.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_INTERNAL_TUNE_PARAMS = "tune_params";
/**
* The session token of this TV program. (Used internally.)
- * <p>
- * This contains a String representation of {@link IBinder} for
+ *
+ * <p>This contains a String representation of {@link IBinder} for
* {@link TvInputService.Session} that provides the current TV program. It is used
* internally to distinguish watched programs entries from different TV input sessions.
- * </p><p>
- * Type: TEXT
- * </p>
+ *
+ * <p>Type: TEXT
*/
public static final String COLUMN_INTERNAL_SESSION_TOKEN = "session_token";
diff --git a/media/java/android/media/tv/TvInputInfo.java b/media/java/android/media/tv/TvInputInfo.java
index b9e99d2..46d33b4 100644
--- a/media/java/android/media/tv/TvInputInfo.java
+++ b/media/java/android/media/tv/TvInputInfo.java
@@ -16,6 +16,7 @@
package android.media.tv;
+import android.annotation.NonNull;
import android.annotation.SystemApi;
import android.content.ComponentName;
import android.content.Context;
@@ -116,12 +117,13 @@ public final class TvInputInfo implements Parcelable {
private final ResolveInfo mService;
private final String mId;
private final String mParentId;
+ private final int mType;
+ private final boolean mIsHardwareInput;
// Attributes from XML meta data.
private String mSetupActivity;
private String mSettingsActivity;
- private int mType = TYPE_TUNER;
private HdmiDeviceInfo mHdmiDeviceInfo;
private String mLabel;
private Uri mIconUri;
@@ -153,7 +155,7 @@ public final class TvInputInfo implements Parcelable {
throws XmlPullParserException, IOException {
return createTvInputInfo(context, service, generateInputIdForComponentName(
new ComponentName(service.serviceInfo.packageName, service.serviceInfo.name)),
- null, TYPE_TUNER, null, null, false);
+ null, TYPE_TUNER, false, null, null, false);
}
/**
@@ -177,7 +179,7 @@ public final class TvInputInfo implements Parcelable {
boolean isConnectedToHdmiSwitch = (hdmiDeviceInfo.getPhysicalAddress() & 0x0FFF) != 0;
TvInputInfo input = createTvInputInfo(context, service, generateInputIdForHdmiDevice(
new ComponentName(service.serviceInfo.packageName, service.serviceInfo.name),
- hdmiDeviceInfo), parentId, TYPE_HDMI, label, iconUri, isConnectedToHdmiSwitch);
+ hdmiDeviceInfo), parentId, TYPE_HDMI, true, label, iconUri, isConnectedToHdmiSwitch);
input.mHdmiDeviceInfo = hdmiDeviceInfo;
return input;
}
@@ -202,12 +204,12 @@ public final class TvInputInfo implements Parcelable {
int inputType = sHardwareTypeToTvInputType.get(hardwareInfo.getType(), TYPE_TUNER);
return createTvInputInfo(context, service, generateInputIdForHardware(
new ComponentName(service.serviceInfo.packageName, service.serviceInfo.name),
- hardwareInfo), null, inputType, label, iconUri, false);
+ hardwareInfo), null, inputType, true, label, iconUri, false);
}
private static TvInputInfo createTvInputInfo(Context context, ResolveInfo service,
- String id, String parentId, int inputType, String label, Uri iconUri,
- boolean isConnectedToHdmiSwitch)
+ String id, String parentId, int inputType, boolean isHardwareInput, String label,
+ Uri iconUri, boolean isConnectedToHdmiSwitch)
throws XmlPullParserException, IOException {
ServiceInfo si = service.serviceInfo;
PackageManager pm = context.getPackageManager();
@@ -233,7 +235,7 @@ public final class TvInputInfo implements Parcelable {
"Meta-data does not start with tv-input-service tag in " + si.name);
}
- TvInputInfo input = new TvInputInfo(service, id, parentId, inputType);
+ TvInputInfo input = new TvInputInfo(service, id, parentId, inputType, isHardwareInput);
TypedArray sa = res.obtainAttributes(attrs,
com.android.internal.R.styleable.TvInputService);
input.mSetupActivity = sa.getString(
@@ -272,12 +274,16 @@ public final class TvInputInfo implements Parcelable {
* @param id ID of this TV input. Should be generated via generateInputId*().
* @param parentId ID of this TV input's parent input. {@code null} if none exists.
* @param type The type of this TV input service.
+ * @param isHardwareInput {@code true} if this TV input represents a hardware device.
+ * {@code false} otherwise.
*/
- private TvInputInfo(ResolveInfo service, String id, String parentId, int type) {
+ private TvInputInfo(ResolveInfo service, String id, String parentId, int type,
+ boolean isHardwareInput) {
mService = service;
mId = id;
mParentId = parentId;
mType = type;
+ mIsHardwareInput = isHardwareInput;
}
/**
@@ -290,18 +296,17 @@ public final class TvInputInfo implements Parcelable {
/**
* Returns the parent input ID.
- * <p>
- * A TV input may have a parent input if the TV input is actually a logical representation of
+ *
+ * <p>A TV input may have a parent input if the TV input is actually a logical representation of
* a device behind the hardware port represented by the parent input.
* For example, a HDMI CEC logical device, connected to a HDMI port, appears as another TV
* input. In this case, the parent input of this logical device is the HDMI port.
- * </p><p>
- * Applications may group inputs by parent input ID to provide an easier access to inputs
+ *
+ * <p>Applications may group inputs by parent input ID to provide an easier access to inputs
* sharing the same physical port. In the example of HDMI CEC, logical HDMI CEC devices behind
* the same HDMI port have the same parent ID, which is the ID representing the port. Thus
* applications can group the hardware HDMI port and the logical HDMI CEC devices behind it
* together using this method.
- * </p>
*
* @return the ID of the parent input, if exists. Returns {@code null} if the parent input is
* not specified.
@@ -381,6 +386,16 @@ public final class TvInputInfo implements Parcelable {
}
/**
+     * Returns {@code true} if this TV input represents a hardware device (e.g. built-in tuner,
+     * HDMI1), {@code false} otherwise.
+ * @hide
+ */
+ @SystemApi
+ public boolean isHardwareInput() {
+ return mIsHardwareInput;
+ }
+
+ /**
* Returns {@code true}, if a CEC device for this TV input is connected to an HDMI switch, i.e.,
* the device isn't directly connected to a HDMI port.
* @hide
@@ -410,7 +425,7 @@ public final class TvInputInfo implements Parcelable {
* @return a CharSequence containing the TV input's label. If the TV input does not have
* a label, its name is returned.
*/
- public CharSequence loadLabel(Context context) {
+ public CharSequence loadLabel(@NonNull Context context) {
if (TextUtils.isEmpty(mLabel)) {
return mService.loadLabel(context.getPackageManager());
} else {
@@ -438,7 +453,7 @@ public final class TvInputInfo implements Parcelable {
* @return a Drawable containing the TV input's icon. If the TV input does not have an icon,
* application's icon is returned. If it's unavailable too, {@code null} is returned.
*/
- public Drawable loadIcon(Context context) {
+ public Drawable loadIcon(@NonNull Context context) {
if (mIconUri == null) {
return loadServiceIcon(context);
}
@@ -492,13 +507,14 @@ public final class TvInputInfo implements Parcelable {
* @param flags The flags used for parceling.
*/
@Override
- public void writeToParcel(Parcel dest, int flags) {
+ public void writeToParcel(@NonNull Parcel dest, int flags) {
dest.writeString(mId);
dest.writeString(mParentId);
mService.writeToParcel(dest, flags);
dest.writeString(mSetupActivity);
dest.writeString(mSettingsActivity);
dest.writeInt(mType);
+ dest.writeByte(mIsHardwareInput ? (byte) 1 : 0);
dest.writeParcelable(mHdmiDeviceInfo, flags);
dest.writeParcelable(mIconUri, flags);
dest.writeString(mLabel);
@@ -572,6 +588,7 @@ public final class TvInputInfo implements Parcelable {
mSetupActivity = in.readString();
mSettingsActivity = in.readString();
mType = in.readInt();
+ mIsHardwareInput = in.readByte() == 1 ? true : false;
mHdmiDeviceInfo = in.readParcelable(null);
mIconUri = in.readParcelable(null);
mLabel = in.readString();
diff --git a/media/java/android/media/tv/TvInputManager.java b/media/java/android/media/tv/TvInputManager.java
index f55299e..3272a23 100644
--- a/media/java/android/media/tv/TvInputManager.java
+++ b/media/java/android/media/tv/TvInputManager.java
@@ -16,8 +16,11 @@
package android.media.tv;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.graphics.Rect;
+import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
@@ -37,6 +40,8 @@ import android.view.KeyEvent;
import android.view.Surface;
import android.view.View;
+import com.android.internal.util.Preconditions;
+
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
@@ -51,7 +56,7 @@ public final class TvInputManager {
private static final String TAG = "TvInputManager";
static final int VIDEO_UNAVAILABLE_REASON_START = 0;
- static final int VIDEO_UNAVAILABLE_REASON_END = 3;
+ static final int VIDEO_UNAVAILABLE_REASON_END = 4;
/**
* A generic reason. Video is not available due to an unspecified error.
@@ -69,42 +74,66 @@ public final class TvInputManager {
* Video is not available because the TV input stopped the playback temporarily to buffer more
* data.
*/
- public static final int VIDEO_UNAVAILABLE_REASON_BUFFERING = VIDEO_UNAVAILABLE_REASON_END;
+ public static final int VIDEO_UNAVAILABLE_REASON_BUFFERING = 3;
+ /**
+ * Video is not available because the current program is audio-only.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY = VIDEO_UNAVAILABLE_REASON_END;
/**
- * The TV input is in unknown state.
- * <p>
- * State for denoting unknown TV input state. The typical use case is when a requested TV
- * input is removed from the device or it is not registered. Used in
- * {@code ITvInputManager.getTvInputState()}.
- * </p>
- * @hide
+ * Status prior to calling {@link TvInputService.Session#notifyTimeShiftStatusChanged}.
+ */
+ public static final int TIME_SHIFT_STATUS_UNKNOWN = 0;
+
+ /**
+ * The TV input does not support time shifting.
*/
- public static final int INPUT_STATE_UNKNOWN = -1;
+ public static final int TIME_SHIFT_STATUS_UNSUPPORTED = 1;
+
+ /**
+ * Time shifting is currently not available but might work again later.
+ */
+ public static final int TIME_SHIFT_STATUS_UNAVAILABLE = 2;
+
+ /**
+ * Time shifting is currently available. In this status, the application assumes it can
+ * pause/resume playback, seek to a specified time position and set playback rate and audio
+ * mode.
+ */
+ public static final int TIME_SHIFT_STATUS_AVAILABLE = 3;
+
+ public static final long TIME_SHIFT_INVALID_TIME = Long.MIN_VALUE;
/**
* The TV input is connected.
- * <p>
- * State for {@link #getInputState} and {@link
- * TvInputManager.TvInputCallback#onInputStateChanged}.
- * </p>
+ *
+ * <p>This state indicates that a source device is connected to the input port and is in the
+ * normal operation mode. It is mostly relevant to hardware inputs such as HDMI input. This is
+ * the default state for any hardware inputs where their states are unknown. Non-hardware inputs
+ * are considered connected all the time.
+ *
+ * @see #getInputState
+ * @see TvInputManager.TvInputCallback#onInputStateChanged
*/
public static final int INPUT_STATE_CONNECTED = 0;
/**
- * The TV input is connected but in standby mode. It would take a while until it becomes
- * fully ready.
- * <p>
- * State for {@link #getInputState} and {@link
- * TvInputManager.TvInputCallback#onInputStateChanged}.
- * </p>
+ * The TV input is connected but in standby mode.
+ *
+ * <p>This state indicates that a source device is connected to the input port but is in standby
+ * mode. It is mostly relevant to hardware inputs such as HDMI input.
+ *
+ * @see #getInputState
+ * @see TvInputManager.TvInputCallback#onInputStateChanged
*/
public static final int INPUT_STATE_CONNECTED_STANDBY = 1;
/**
* The TV input is disconnected.
- * <p>
- * State for {@link #getInputState} and {@link
- * TvInputManager.TvInputCallback#onInputStateChanged}.
- * </p>
+ *
+ * <p>This state indicates that a source device is disconnected from the input port. It is
+ * mostly relevant to hardware inputs such as HDMI input.
+ *
+ * @see #getInputState
+ * @see TvInputManager.TvInputCallback#onInputStateChanged
*/
public static final int INPUT_STATE_DISCONNECTED = 2;
@@ -124,15 +153,17 @@ public final class TvInputManager {
/**
* Broadcast intent action used to query available content rating systems.
- * <p>
- * The TV input manager service locates available content rating systems by querying broadcast
- * receivers that are registered for this action. An application can offer additional content
- * rating systems to the user by declaring a suitable broadcast receiver in its manifest.
- * </p><p>
- * Here is an example broadcast receiver declaration that an application might include in its
+ *
+ * <p>The TV input manager service locates available content rating systems by querying
+ * broadcast receivers that are registered for this action. An application can offer additional
+ * content rating systems to the user by declaring a suitable broadcast receiver in its
+ * manifest.
+ *
+ * <p>Here is an example broadcast receiver declaration that an application might include in its
* AndroidManifest.xml to advertise custom content rating systems. The meta-data specifies a
* resource that contains a description of each content rating system that is provided by the
* application.
+ *
* <p><pre class="prettyprint">
* {@literal
* <receiver android:name=".TvInputReceiver">
@@ -143,13 +174,13 @@ public final class TvInputManager {
* <meta-data
* android:name="android.media.tv.metadata.CONTENT_RATING_SYSTEMS"
* android:resource="@xml/tv_content_rating_systems" />
- * </receiver>}</pre></p>
- * In the above example, the <code>@xml/tv_content_rating_systems</code> resource refers to an
+ * </receiver>}</pre>
+ *
+ * <p>In the above example, the <code>@xml/tv_content_rating_systems</code> resource refers to an
* XML resource whose root element is <code>&lt;rating-system-definitions&gt;</code> that
* contains zero or more <code>&lt;rating-system-definition&gt;</code> elements. Each <code>
* &lt;rating-system-definition&gt;</code> element specifies the ratings, sub-ratings and rating
* orders of a particular content rating system.
- * </p>
*
* @see TvContentRating
*/
@@ -158,10 +189,9 @@ public final class TvInputManager {
/**
* Content rating systems metadata associated with {@link #ACTION_QUERY_CONTENT_RATING_SYSTEMS}.
- * <p>
- * Specifies the resource ID of an XML resource that describes the content rating systems that
- * are provided by the application.
- * </p>
+ *
+ * <p>Specifies the resource ID of an XML resource that describes the content rating systems
+ * that are provided by the application.
*/
public static final String META_DATA_CONTENT_RATING_SYSTEMS =
"android.media.tv.metadata.CONTENT_RATING_SYSTEMS";
@@ -204,7 +234,7 @@ public final class TvInputManager {
* @param session A {@link TvInputManager.Session} instance created. This can be
* {@code null} if the creation request failed.
*/
- public void onSessionCreated(Session session) {
+ public void onSessionCreated(@Nullable Session session) {
}
/**
@@ -245,7 +275,7 @@ public final class TvInputManager {
* @param trackId The ID of the selected track. When {@code null} the currently selected
* track for a given type should be unselected.
*/
- public void onTrackSelected(Session session, int type, String trackId) {
+ public void onTrackSelected(Session session, int type, @Nullable String trackId) {
}
/**
@@ -271,13 +301,14 @@ public final class TvInputManager {
/**
* This is called when the video is not available, so the TV input stops the playback.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param reason The reason why the TV input stopped the playback:
* <ul>
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_UNKNOWN}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
*/
public void onVideoUnavailable(Session session, int reason) {
@@ -287,7 +318,7 @@ public final class TvInputManager {
* This is called when the current program content turns out to be allowed to watch since
* its content rating is not blocked by parental controls.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
*/
public void onContentAllowed(Session session) {
}
@@ -296,7 +327,7 @@ public final class TvInputManager {
* This is called when the current program content turns out to be not allowed to watch
* since its content rating is blocked by parental controls.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param rating The content ration of the blocked program.
*/
public void onContentBlocked(Session session, TvContentRating rating) {
@@ -306,7 +337,7 @@ public final class TvInputManager {
* This is called when {@link TvInputService.Session#layoutSurface} is called to change the
* layout of surface.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param left Left position.
* @param top Top position.
* @param right Right position.
@@ -328,6 +359,45 @@ public final class TvInputManager {
@SystemApi
public void onSessionEvent(Session session, String eventType, Bundle eventArgs) {
}
+
+ /**
+ * This is called when the time shift status is changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+             * @param status The current time shift status. Should be one of the following.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void onTimeShiftStatusChanged(Session session, int status) {
+ }
+
+ /**
+ * This is called when the start playback position is changed.
+ *
+ * <p>The start playback position of the time shifted program should be adjusted when the TV
+             * input cannot retain the whole recorded program for some reason (e.g. limitation on
+ * storage space). This is necessary to prevent the application from allowing the user to
+ * seek to a time position that is not reachable.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param timeMs The start playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftStartPositionChanged(Session session, long timeMs) {
+ }
+
+ /**
+ * This is called when the current playback position is changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param timeMs The current playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftCurrentPositionChanged(Session session, long timeMs) {
+ }
}
private static final class SessionCallbackRecord {
@@ -450,10 +520,37 @@ public final class TvInputManager {
}
});
}
+
+ void postTimeShiftStatusChanged(final int status) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftStatusChanged(mSession, status);
+ }
+ });
+ }
+
+ void postTimeShiftStartPositionChanged(final long timeMs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftStartPositionChanged(mSession, timeMs);
+ }
+ });
+ }
+
+ void postTimeShiftCurrentPositionChanged(final long timeMs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftCurrentPositionChanged(mSession, timeMs);
+ }
+ });
+ }
}
/**
- * Callback used to monitor status of the TV input.
+ * Callback used to monitor status of the TV inputs.
*/
public abstract static class TvInputCallback {
/**
@@ -471,7 +568,10 @@ public final class TvInputManager {
}
/**
- * This is called when a TV input is added.
+ * This is called when a TV input is added to the system.
+ *
+ * <p>Normally it happens when the user installs a new TV input package that implements
+ * {@link TvInputService} interface.
*
* @param inputId The id of the TV input.
*/
@@ -479,7 +579,10 @@ public final class TvInputManager {
}
/**
- * This is called when a TV input is removed.
+ * This is called when a TV input is removed from the system.
+ *
+ * <p>Normally it happens when the user uninstalls the previously installed TV input
+ * package.
*
* @param inputId The id of the TV input.
*/
@@ -487,9 +590,10 @@ public final class TvInputManager {
}
/**
- * This is called when a TV input is updated. The update of TV input happens when it is
- * reinstalled or the media on which the newer version of TV input exists is
- * available/unavailable.
+ * This is called when a TV input is updated on the system.
+ *
+ * <p>Normally it happens when a previously installed TV input package is re-installed or
+ * the media on which a newer version of the package exists becomes available/unavailable.
*
* @param inputId The id of the TV input.
* @hide
@@ -718,6 +822,42 @@ public final class TvInputManager {
record.postSessionEvent(eventType, eventArgs);
}
}
+
+ @Override
+ public void onTimeShiftStatusChanged(int status, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftStatusChanged(status);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStartPositionChanged(long timeMs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftStartPositionChanged(timeMs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(long timeMs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftCurrentPositionChanged(timeMs);
+ }
+ }
};
mManagerCallback = new ITvInputManagerCallback.Stub() {
@Override
@@ -766,15 +906,12 @@ public final class TvInputManager {
synchronized (mLock) {
for (TvInputInfo info : infos) {
String inputId = info.getId();
- int state = mService.getTvInputState(inputId, mUserId);
- if (state != INPUT_STATE_UNKNOWN) {
- mStateMap.put(inputId, state);
- }
+ mStateMap.put(inputId, mService.getTvInputState(inputId, mUserId));
}
}
}
} catch (RemoteException e) {
- Log.e(TAG, "TvInputManager initialization failed: " + e);
+ Log.e(TAG, "TvInputManager initialization failed", e);
}
}
@@ -797,10 +934,9 @@ public final class TvInputManager {
* @param inputId The ID of the TV input.
* @return the {@link TvInputInfo} for a given TV input. {@code null} if not found.
*/
- public TvInputInfo getTvInputInfo(String inputId) {
- if (inputId == null) {
- throw new IllegalArgumentException("inputId cannot be null");
- }
+ @Nullable
+ public TvInputInfo getTvInputInfo(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
try {
return mService.getTvInputInfo(inputId, mUserId);
} catch (RemoteException e) {
@@ -809,7 +945,9 @@ public final class TvInputManager {
}
/**
- * Returns the state of a given TV input. It returns one of the following:
+ * Returns the state of a given TV input.
+ *
+ * <p>The state is one of the following:
* <ul>
* <li>{@link #INPUT_STATE_CONNECTED}
* <li>{@link #INPUT_STATE_CONNECTED_STANDBY}
@@ -817,17 +955,15 @@ public final class TvInputManager {
* </ul>
*
* @param inputId The id of the TV input.
- * @throws IllegalArgumentException if the argument is {@code null} or if there is no
- * {@link TvInputInfo} corresponding to {@code inputId}.
+ * @throws IllegalArgumentException if the argument is {@code null}.
*/
- public int getInputState(String inputId) {
- if (inputId == null) {
- throw new IllegalArgumentException("inputId cannot be null");
- }
+ public int getInputState(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
synchronized (mLock) {
Integer state = mStateMap.get(inputId);
if (state == null) {
- throw new IllegalArgumentException("Unrecognized input ID: " + inputId);
+ Log.w(TAG, "Unrecognized input ID: " + inputId);
+ return INPUT_STATE_DISCONNECTED;
}
return state.intValue();
}
@@ -838,15 +974,10 @@ public final class TvInputManager {
*
* @param callback A callback used to monitor status of the TV inputs.
* @param handler A {@link Handler} that the status change will be delivered to.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
*/
- public void registerCallback(TvInputCallback callback, Handler handler) {
- if (callback == null) {
- throw new IllegalArgumentException("callback cannot be null");
- }
- if (handler == null) {
- throw new IllegalArgumentException("handler cannot be null");
- }
+ public void registerCallback(@NonNull TvInputCallback callback, @NonNull Handler handler) {
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
synchronized (mLock) {
mCallbackRecords.add(new TvInputCallbackRecord(callback, handler));
}
@@ -856,12 +987,9 @@ public final class TvInputManager {
* Unregisters the existing {@link TvInputCallback}.
*
* @param callback The existing callback to remove.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
*/
- public void unregisterCallback(final TvInputCallback callback) {
- if (callback == null) {
- throw new IllegalArgumentException("callback cannot be null");
- }
+ public void unregisterCallback(@NonNull final TvInputCallback callback) {
+ Preconditions.checkNotNull(callback);
synchronized (mLock) {
for (Iterator<TvInputCallbackRecord> it = mCallbackRecords.iterator();
it.hasNext(); ) {
@@ -910,10 +1038,8 @@ public final class TvInputManager {
* @param rating The TV content rating to check.
* @return {@code true} if the given TV content rating is blocked, {@code false} otherwise.
*/
- public boolean isRatingBlocked(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public boolean isRatingBlocked(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
try {
return mService.isRatingBlocked(rating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -949,10 +1075,8 @@ public final class TvInputManager {
* @hide
*/
@SystemApi
- public void addBlockedRating(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public void addBlockedRating(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
try {
mService.addBlockedRating(rating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -969,10 +1093,8 @@ public final class TvInputManager {
* @hide
*/
@SystemApi
- public void removeBlockedRating(TvContentRating rating) {
- if (rating == null) {
- throw new IllegalArgumentException("rating cannot be null");
- }
+ public void removeBlockedRating(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
try {
mService.removeBlockedRating(rating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -995,29 +1117,21 @@ public final class TvInputManager {
/**
* Creates a {@link Session} for a given TV input.
- * <p>
- * The number of sessions that can be created at the same time is limited by the capability of
- * the given TV input.
- * </p>
+ *
+ * <p>The number of sessions that can be created at the same time is limited by the capability
+ * of the given TV input.
*
* @param inputId The id of the TV input.
* @param callback A callback used to receive the created session.
* @param handler A {@link Handler} that the session creation will be delivered to.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
* @hide
*/
@SystemApi
- public void createSession(String inputId, final SessionCallback callback,
- Handler handler) {
- if (inputId == null) {
- throw new IllegalArgumentException("id cannot be null");
- }
- if (callback == null) {
- throw new IllegalArgumentException("callback cannot be null");
- }
- if (handler == null) {
- throw new IllegalArgumentException("handler cannot be null");
- }
+ public void createSession(@NonNull String inputId, @NonNull final SessionCallback callback,
+ @NonNull Handler handler) {
+ Preconditions.checkNotNull(inputId);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
SessionCallbackRecord record = new SessionCallbackRecord(callback, handler);
synchronized (mSessionCallbackRecordMap) {
int seq = mNextSeq++;
@@ -1171,22 +1285,22 @@ public final class TvInputManager {
private TvInputEventSender mSender;
private InputChannel mChannel;
- private final Object mTrackLock = new Object();
- // @GuardedBy("mTrackLock")
+ private final Object mMetadataLock = new Object();
+ // @GuardedBy("mMetadataLock")
private final List<TvTrackInfo> mAudioTracks = new ArrayList<TvTrackInfo>();
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private final List<TvTrackInfo> mVideoTracks = new ArrayList<TvTrackInfo>();
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private final List<TvTrackInfo> mSubtitleTracks = new ArrayList<TvTrackInfo>();
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private String mSelectedAudioTrackId;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private String mSelectedVideoTrackId;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private String mSelectedSubtitleTrackId;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private int mVideoWidth;
- // @GuardedBy("mTrackLock")
+ // @GuardedBy("mMetadataLock")
private int mVideoHeight;
private Session(IBinder token, InputChannel channel, ITvInputManager service, int userId,
@@ -1299,7 +1413,6 @@ public final class TvInputManager {
* Tunes to a given channel.
*
* @param channelUri The URI of a channel.
- * @throws IllegalArgumentException if the argument is {@code null}.
*/
public void tune(Uri channelUri) {
tune(channelUri, null);
@@ -1310,19 +1423,16 @@ public final class TvInputManager {
*
* @param channelUri The URI of a channel.
* @param params A set of extra parameters which might be handled with this tune event.
- * @throws IllegalArgumentException if {@code channelUri} is {@code null}.
* @hide
*/
@SystemApi
- public void tune(Uri channelUri, Bundle params) {
- if (channelUri == null) {
- throw new IllegalArgumentException("channelUri cannot be null");
- }
+ public void tune(@NonNull Uri channelUri, Bundle params) {
+ Preconditions.checkNotNull(channelUri);
if (mToken == null) {
Log.w(TAG, "The session has been already released");
return;
}
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
mAudioTracks.clear();
mVideoTracks.clear();
mSubtitleTracks.clear();
@@ -1366,8 +1476,8 @@ public final class TvInputManager {
* track of the given type will be unselected.
* @see #getTracks
*/
- public void selectTrack(int type, String trackId) {
- synchronized (mTrackLock) {
+ public void selectTrack(int type, @Nullable String trackId) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO) {
if (trackId != null && !containsTrack(mAudioTracks, trackId)) {
Log.w(TAG, "Invalid audio trackId: " + trackId);
@@ -1415,8 +1525,9 @@ public final class TvInputManager {
* {@link TvTrackInfo#TYPE_VIDEO} or {@link TvTrackInfo#TYPE_SUBTITLE}.
* @return the list of tracks for the given type.
*/
+ @Nullable
public List<TvTrackInfo> getTracks(int type) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO) {
if (mAudioTracks == null) {
return null;
@@ -1444,8 +1555,9 @@ public final class TvInputManager {
* @return the ID of the selected track.
* @see #selectTrack
*/
+ @Nullable
public String getSelectedTrack(int type) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO) {
return mSelectedAudioTrackId;
} else if (type == TvTrackInfo.TYPE_VIDEO) {
@@ -1462,7 +1574,7 @@ public final class TvInputManager {
* there is an update.
*/
boolean updateTracks(List<TvTrackInfo> tracks) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
mAudioTracks.clear();
mVideoTracks.clear();
mSubtitleTracks.clear();
@@ -1485,7 +1597,7 @@ public final class TvInputManager {
* Returns true if there is an update.
*/
boolean updateTrackSelection(int type, String trackId) {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (type == TvTrackInfo.TYPE_AUDIO && trackId != mSelectedAudioTrackId) {
mSelectedAudioTrackId = trackId;
return true;
@@ -1509,7 +1621,7 @@ public final class TvInputManager {
* track.
*/
TvTrackInfo getVideoTrackToNotify() {
- synchronized (mTrackLock) {
+ synchronized (mMetadataLock) {
if (!mVideoTracks.isEmpty() && mSelectedVideoTrackId != null) {
for (TvTrackInfo track : mVideoTracks) {
if (track.getId().equals(mSelectedVideoTrackId)) {
@@ -1528,6 +1640,100 @@ public final class TvInputManager {
}
/**
+ * Pauses the playback. Call {@link #timeShiftResume()} to restart the playback.
+ */
+ void timeShiftPause() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftPause(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Resumes the playback. No-op if it is already playing the channel.
+ */
+ void timeShiftResume() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftResume(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Seeks to a specified time position.
+ *
+         * <p>Normally, the position is given within the range between the start and the
+         * current time, inclusive.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ * @see TvView.TimeShiftPositionCallback#onTimeShiftStartPositionChanged
+ */
+ void timeShiftSeekTo(long timeMs) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftSeekTo(mToken, timeMs, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Sets playback rate and audio mode.
+ *
+         * @param rate The ratio of the desired playback rate to the normal playback rate.
+ * @param audioMode Audio playback mode. Must be one of the supported audio modes:
+ * <ul>
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_DEFAULT}
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_STRETCH}
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE}
+ * </ul>
+ */
+ void timeShiftSetPlaybackRate(float rate, int audioMode) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ if (audioMode != MediaPlayer.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE) {
+ throw new IllegalArgumentException("Unknown audio playback mode " + audioMode);
+ }
+ try {
+ mService.timeShiftSetPlaybackRate(mToken, rate, audioMode, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+         * Enables or disables position tracking.
+ *
+ * @param enable {@code true} to enable tracking, {@code false} otherwise.
+ */
+ void timeShiftEnablePositionTracking(boolean enable) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftEnablePositionTracking(mToken, enable, mUserId);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
* Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
* TvInputService.Session.appPrivateCommand()} on the current TvView.
*
@@ -1559,16 +1765,11 @@ public final class TvInputManager {
*
* @param view A view playing TV.
* @param frame A position of the overlay view.
- * @throws IllegalArgumentException if any of the arguments is {@code null}.
* @throws IllegalStateException if {@code view} is not attached to a window.
*/
- void createOverlayView(View view, Rect frame) {
- if (view == null) {
- throw new IllegalArgumentException("view cannot be null");
- }
- if (frame == null) {
- throw new IllegalArgumentException("frame cannot be null");
- }
+ void createOverlayView(@NonNull View view, @NonNull Rect frame) {
+ Preconditions.checkNotNull(view);
+ Preconditions.checkNotNull(frame);
if (view.getWindowToken() == null) {
throw new IllegalStateException("view must be attached to a window");
}
@@ -1587,12 +1788,9 @@ public final class TvInputManager {
* Relayouts the current overlay view.
*
* @param frame A new position of the overlay view.
- * @throws IllegalArgumentException if the arguments is {@code null}.
*/
- void relayoutOverlayView(Rect frame) {
- if (frame == null) {
- throw new IllegalArgumentException("frame cannot be null");
- }
+ void relayoutOverlayView(@NonNull Rect frame) {
+ Preconditions.checkNotNull(frame);
if (mToken == null) {
Log.w(TAG, "The session has been already released");
return;
@@ -1622,14 +1820,12 @@ public final class TvInputManager {
/**
* Requests to unblock content blocked by parental controls.
*/
- void requestUnblockContent(TvContentRating unblockedRating) {
+ void requestUnblockContent(@NonNull TvContentRating unblockedRating) {
+ Preconditions.checkNotNull(unblockedRating);
if (mToken == null) {
Log.w(TAG, "The session has been already released");
return;
}
- if (unblockedRating == null) {
- throw new IllegalArgumentException("unblockedRating cannot be null");
- }
try {
mService.requestUnblockContent(mToken, unblockedRating.flattenToString(), mUserId);
} catch (RemoteException e) {
@@ -1640,25 +1836,22 @@ public final class TvInputManager {
/**
* Dispatches an input event to this session.
*
- * @param event An {@link InputEvent} to dispatch.
+ * @param event An {@link InputEvent} to dispatch. Cannot be {@code null}.
* @param token A token used to identify the input event later in the callback.
- * @param callback A callback used to receive the dispatch result.
- * @param handler A {@link Handler} that the dispatch result will be delivered to.
+ * @param callback A callback used to receive the dispatch result. Cannot be {@code null}.
+ * @param handler A {@link Handler} that the dispatch result will be delivered to. Cannot be
+ * {@code null}.
* @return Returns {@link #DISPATCH_HANDLED} if the event was handled. Returns
* {@link #DISPATCH_NOT_HANDLED} if the event was not handled. Returns
* {@link #DISPATCH_IN_PROGRESS} if the event is in progress and the callback will
* be invoked later.
- * @throws IllegalArgumentException if any of the necessary arguments is {@code null}.
* @hide
*/
- public int dispatchInputEvent(InputEvent event, Object token,
- FinishedInputEventCallback callback, Handler handler) {
- if (event == null) {
- throw new IllegalArgumentException("event cannot be null");
- }
- if (callback != null && handler == null) {
- throw new IllegalArgumentException("handler cannot be null");
- }
+ public int dispatchInputEvent(@NonNull InputEvent event, Object token,
+ @NonNull FinishedInputEventCallback callback, @NonNull Handler handler) {
+ Preconditions.checkNotNull(event);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
synchronized (mHandler) {
if (mChannel == null) {
return DISPATCH_NOT_HANDLED;
@@ -1886,7 +2079,7 @@ public final class TvInputManager {
/**
* The Hardware provides the per-hardware functionality of TV hardware.
*
- * TV hardware is physical hardware attached to the Android device; for example, HDMI ports,
+ * <p>TV hardware is physical hardware attached to the Android device; for example, HDMI ports,
* Component/Composite ports, etc. Specifically, logical devices such as HDMI CEC logical
* devices don't fall into this category.
*
diff --git a/media/java/android/media/tv/TvInputService.java b/media/java/android/media/tv/TvInputService.java
index cf1b441..34c36c3 100644
--- a/media/java/android/media/tv/TvInputService.java
+++ b/media/java/android/media/tv/TvInputService.java
@@ -16,6 +16,8 @@
package android.media.tv;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.annotation.SuppressLint;
import android.annotation.SystemApi;
import android.app.Service;
@@ -49,6 +51,7 @@ import android.view.accessibility.CaptioningManager;
import android.widget.FrameLayout;
import com.android.internal.os.SomeArgs;
+import com.android.internal.util.Preconditions;
import java.util.ArrayList;
import java.util.HashSet;
@@ -58,15 +61,14 @@ import java.util.Set;
/**
* The TvInputService class represents a TV input or source such as HDMI or built-in tuner which
* provides pass-through video or broadcast TV programs.
- * <p>
- * Applications will not normally use this service themselves, instead relying on the standard
+ *
+ * <p>Applications will not normally use this service themselves, instead relying on the standard
* interaction provided by {@link TvView}. Those implementing TV input services should normally do
* so by deriving from this class and providing their own session implementation based on
* {@link TvInputService.Session}. All TV input services must require that clients hold the
* {@link android.Manifest.permission#BIND_TV_INPUT} in order to interact with the service; if this
* permission is not specified in the manifest, the system will refuse to bind to that TV input
* service.
- * </p>
*/
public abstract class TvInputService extends Service {
private static final boolean DEBUG = false;
@@ -159,13 +161,14 @@ public abstract class TvInputService extends Service {
/**
* Returns a concrete implementation of {@link Session}.
- * <p>
- * May return {@code null} if this TV input service fails to create a session for some reason.
- * If TV input represents an external device connected to a hardware TV input,
+ *
+ * <p>May return {@code null} if this TV input service fails to create a session for some
+ * reason. If TV input represents an external device connected to a hardware TV input,
* {@link HardwareSession} should be returned.
- * </p>
+ *
* @param inputId The ID of the TV input associated with the session.
*/
+ @Nullable
public abstract Session onCreateSession(String inputId);
/**
@@ -176,6 +179,7 @@ public abstract class TvInputService extends Service {
* @param hardwareInfo {@link TvInputHardwareInfo} object just added.
* @hide
*/
+ @Nullable
@SystemApi
public TvInputInfo onHardwareAdded(TvInputHardwareInfo hardwareInfo) {
return null;
@@ -189,6 +193,7 @@ public abstract class TvInputService extends Service {
* @param hardwareInfo {@link TvInputHardwareInfo} object just removed.
* @hide
*/
+ @Nullable
@SystemApi
public String onHardwareRemoved(TvInputHardwareInfo hardwareInfo) {
return null;
@@ -202,6 +207,7 @@ public abstract class TvInputService extends Service {
* @param deviceInfo {@link HdmiDeviceInfo} object just added.
* @hide
*/
+ @Nullable
@SystemApi
public TvInputInfo onHdmiDeviceAdded(HdmiDeviceInfo deviceInfo) {
return null;
@@ -215,6 +221,7 @@ public abstract class TvInputService extends Service {
* @param deviceInfo {@link HdmiDeviceInfo} object just removed.
* @hide
*/
+ @Nullable
@SystemApi
public String onHdmiDeviceRemoved(HdmiDeviceInfo deviceInfo) {
return null;
@@ -235,7 +242,9 @@ public abstract class TvInputService extends Service {
* Base class for derived classes to implement to provide a TV input session.
*/
public abstract static class Session implements KeyEvent.Callback {
- private static final int DETACH_OVERLAY_VIEW_TIMEOUT = 5000;
+ private static final int DETACH_OVERLAY_VIEW_TIMEOUT_MS = 5000;
+ private static final int POSITION_UPDATE_INTERVAL_MS = 1000;
+
private final KeyEvent.DispatcherState mDispatcherState = new KeyEvent.DispatcherState();
private final WindowManager mWindowManager;
final Handler mHandler;
@@ -248,6 +257,10 @@ public abstract class TvInputService extends Service {
private boolean mOverlayViewEnabled;
private IBinder mWindowToken;
private Rect mOverlayFrame;
+ private long mStartPositionMs;
+ private long mCurrentPositionMs;
+ private final TimeShiftPositionTrackingRunnable
+ mTimeShiftPositionTrackingRunnable = new TimeShiftPositionTrackingRunnable();
private final Object mLock = new Object();
// @GuardedBy("mLock")
@@ -264,6 +277,7 @@ public abstract class TvInputService extends Service {
mContext = context;
mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
mHandler = new Handler(context.getMainLooper());
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
}
/**
@@ -300,10 +314,8 @@ public abstract class TvInputService extends Service {
* @hide
*/
@SystemApi
- public void notifySessionEvent(final String eventType, final Bundle eventArgs) {
- if (eventType == null) {
- throw new IllegalArgumentException("eventType should not be null.");
- }
+ public void notifySessionEvent(@NonNull final String eventType, final Bundle eventArgs) {
+ Preconditions.checkNotNull(eventType);
executeOrPostRunnable(new Runnable() {
@Override
public void run() {
@@ -313,7 +325,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onSessionEvent(eventType, eventArgs);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in sending event (event=" + eventType + ")");
+ Log.w(TAG, "error in sending event (event=" + eventType + ")", e);
}
}
});
@@ -334,7 +346,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onChannelRetuned(channelUri);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyChannelRetuned");
+ Log.w(TAG, "error in notifyChannelRetuned", e);
}
}
});
@@ -373,7 +385,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onTracksChanged(tracks);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyTracksChanged");
+ Log.w(TAG, "error in notifyTracksChanged", e);
}
}
});
@@ -403,7 +415,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onTrackSelected(type, trackId);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyTrackSelected");
+ Log.w(TAG, "error in notifyTrackSelected", e);
}
}
});
@@ -426,7 +438,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onVideoAvailable();
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyVideoAvailable");
+ Log.w(TAG, "error in notifyVideoAvailable", e);
}
}
});
@@ -443,6 +455,7 @@ public abstract class TvInputService extends Service {
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
* @see #notifyVideoAvailable
*/
@@ -460,7 +473,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onVideoUnavailable(reason);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyVideoUnavailable");
+ Log.w(TAG, "error in notifyVideoUnavailable", e);
}
}
});
@@ -468,8 +481,8 @@ public abstract class TvInputService extends Service {
/**
* Informs the application that the user is allowed to watch the current program content.
- * <p>
- * Each TV input service is required to query the system whether the user is allowed to
+ *
+ * <p>Each TV input service is required to query the system whether the user is allowed to
* watch the current program before showing it to the user if the parental controls is
* enabled (i.e. {@link TvInputManager#isParentalControlsEnabled
* TvInputManager.isParentalControlsEnabled()} returns {@code true}). Whether the TV input
@@ -480,13 +493,12 @@ public abstract class TvInputService extends Service {
* result. If the rating in question turns out to be allowed by the user, the TV input
* service must call this method to notify the application that is permitted to show the
* content.
- * </p><p>
- * Each TV input service also needs to continuously listen to any changes made to the
+ *
+ * <p>Each TV input service also needs to continuously listen to any changes made to the
* parental controls settings by registering a broadcast receiver to receive
* {@link TvInputManager#ACTION_BLOCKED_RATINGS_CHANGED} and
* {@link TvInputManager#ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED} and immediately
* reevaluate the current program with the new parental controls settings.
- * </p>
*
* @see #notifyContentBlocked
* @see TvInputManager
@@ -501,7 +513,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onContentAllowed();
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyContentAllowed");
+ Log.w(TAG, "error in notifyContentAllowed", e);
}
}
});
@@ -509,8 +521,8 @@ public abstract class TvInputService extends Service {
/**
* Informs the application that the current program content is blocked by parent controls.
- * <p>
- * Each TV input service is required to query the system whether the user is allowed to
+ *
+ * <p>Each TV input service is required to query the system whether the user is allowed to
* watch the current program before showing it to the user if the parental controls is
* enabled (i.e. {@link TvInputManager#isParentalControlsEnabled
* TvInputManager.isParentalControlsEnabled()} returns {@code true}). Whether the TV input
@@ -521,19 +533,19 @@ public abstract class TvInputService extends Service {
* result. If the rating in question turns out to be blocked, the TV input service must
* immediately block the content and call this method with the content rating of the current
* program to prompt the PIN verification screen.
- * </p><p>
- * Each TV input service also needs to continuously listen to any changes made to the
+ *
+ * <p>Each TV input service also needs to continuously listen to any changes made to the
* parental controls settings by registering a broadcast receiver to receive
* {@link TvInputManager#ACTION_BLOCKED_RATINGS_CHANGED} and
* {@link TvInputManager#ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED} and immediately
* reevaluate the current program with the new parental controls settings.
- * </p>
*
* @param rating The content rating for the current TV program.
* @see #notifyContentAllowed
* @see TvInputManager
*/
- public void notifyContentBlocked(final TvContentRating rating) {
+ public void notifyContentBlocked(@NonNull final TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
executeOrPostRunnable(new Runnable() {
@Override
public void run() {
@@ -543,7 +555,80 @@ public abstract class TvInputService extends Service {
mSessionCallback.onContentBlocked(rating.flattenToString());
}
} catch (RemoteException e) {
- Log.w(TAG, "error in notifyContentBlocked");
+ Log.w(TAG, "error in notifyContentBlocked", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Informs the application that the time shift status is changed.
+ *
+ * <p>Prior to calling this method, the application assumes the status
+ * {@link TvInputManager#TIME_SHIFT_STATUS_UNKNOWN}. Right after the session is created, it
+ * is important to invoke the method with the status
+ * {@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE} if the implementation does support
+ * time shifting, or {@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED} otherwise. Failure
+         * to notify the status change immediately might result in undesirable behavior in
+         * the application, such as hiding the play controls.
+ *
+ * <p>If the status {@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE} is reported, the
+ * application assumes it can pause/resume playback, seek to a specified time position and
+ * set playback rate and audio mode. The implementation should override
+ * {@link #onTimeShiftPause}, {@link #onTimeShiftResume}, {@link #onTimeShiftSeekTo},
+ * {@link #onTimeShiftGetStartPosition}, {@link #onTimeShiftGetCurrentPosition} and
+ * {@link #onTimeShiftSetPlaybackRate}.
+ *
+         * @param status The current time shift status. Should be one of the following.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void notifyTimeShiftStatusChanged(final int status) {
+ executeOrPostRunnable(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftStatusChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftStatusChanged(status);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftStatusChanged", e);
+ }
+ }
+ });
+ }
+
+ private void notifyTimeShiftStartPositionChanged(final long timeMs) {
+ executeOrPostRunnable(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftStartPositionChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftStartPositionChanged(timeMs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftStartPositionChanged", e);
+ }
+ }
+ });
+ }
+
+ private void notifyTimeShiftCurrentPositionChanged(final long timeMs) {
+ executeOrPostRunnable(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftCurrentPositionChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftCurrentPositionChanged(timeMs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftCurrentPositionChanged", e);
}
}
});
@@ -576,7 +661,7 @@ public abstract class TvInputService extends Service {
mSessionCallback.onLayoutSurface(left, top, right, bottom);
}
} catch (RemoteException e) {
- Log.w(TAG, "error in layoutSurface");
+ Log.w(TAG, "error in layoutSurface", e);
}
}
});
@@ -590,21 +675,20 @@ public abstract class TvInputService extends Service {
/**
* Sets the current session as the main session. The main session is a session whose
* corresponding TV input determines the HDMI-CEC active source device.
- * <p>
- * TV input service that manages HDMI-CEC logical device should implement {@link
+ *
+ * <p>TV input service that manages HDMI-CEC logical device should implement {@link
* #onSetMain} to (1) select the corresponding HDMI logical device as the source device
* when {@code isMain} is {@code true}, and to (2) select the internal device (= TV itself)
* as the source device when {@code isMain} is {@code false} and the session is still main.
* Also, if a surface is passed to a non-main session and active source is changed to
* initiate the surface, the active source should be returned to the main session.
- * </p><p>
- * {@link TvView} guarantees that, when tuning involves a session transition, {@code
+ *
+ * <p>{@link TvView} guarantees that, when tuning involves a session transition, {@code
* onSetMain(true)} for new session is called first, {@code onSetMain(false)} for old
* session is called afterwards. This allows {@code onSetMain(false)} to be no-op when TV
* input service knows that the next main session corresponds to another HDMI logical
* device. Practically, this implies that one TV input service should handle all HDMI port
* and HDMI-CEC logical devices for smooth active source transition.
- * </p>
*
* @param isMain If true, session should become main.
* @see TvView#setMain
@@ -617,15 +701,15 @@ public abstract class TvInputService extends Service {
/**
* Sets the {@link Surface} for the current input session on which the TV input renders
* video.
- * <p>
- * When {@code setSurface(null)} is called, the implementation should stop using the Surface
- * object previously given and release any references to it.
*
- * @param surface possibly {@code null} {@link Surface} an application passes to this TV
+ * <p>When {@code setSurface(null)} is called, the implementation should stop using the
+ * Surface object previously given and release any references to it.
+ *
+ * @param surface possibly {@code null} {@link Surface} the application passes to this TV
* input session.
* @return {@code true} if the surface was set, {@code false} otherwise.
*/
- public abstract boolean onSetSurface(Surface surface);
+ public abstract boolean onSetSurface(@Nullable Surface surface);
/**
* Called after any structural changes (format or size) have been made to the
@@ -640,10 +724,10 @@ public abstract class TvInputService extends Service {
}
/**
- * Called when a size of an overlay view is changed by an application. Even when the overlay
- * view is disabled by {@link #setOverlayViewEnabled}, this is called. The size is same as
- * the size of {@link Surface} in general. Once {@link #layoutSurface} is called, the sizes
- * of {@link Surface} and the overlay view can be different.
+ * Called when a size of an overlay view is changed by the application. Even when the
+ * overlay view is disabled by {@link #setOverlayViewEnabled}, this is called. The size is
+ * same as the size of {@link Surface} in general. Once {@link #layoutSurface} is called,
+ * the sizes of {@link Surface} and the overlay view can be different.
*
* @param width The width of the overlay view.
* @param height The height of the overlay view.
@@ -654,10 +738,14 @@ public abstract class TvInputService extends Service {
}
/**
- * Sets the relative stream volume of the current TV input session to handle the change of
- * audio focus by setting.
+ * Sets the relative stream volume of the current TV input session.
*
- * @param volume Volume scale from 0.0 to 1.0.
+ * <p>The implementation should honor this request in order to handle audio focus changes or
+ * mute the current session when multiple sessions, possibly from different inputs are
+ * active. If the method has not yet been called, the implementation should assume the
+ * default value of {@code 1.0f}.
+ *
+ * @param volume A volume value between {@code 0.0f} to {@code 1.0f}.
*/
public abstract void onSetStreamVolume(float volume);
@@ -686,8 +774,8 @@ public abstract class TvInputService extends Service {
/**
* Enables or disables the caption.
- * <p>
- * The locale for the user's preferred captioning language can be obtained by calling
+ *
+ * <p>The locale for the user's preferred captioning language can be obtained by calling
* {@link CaptioningManager#getLocale CaptioningManager.getLocale()}.
*
* @param enabled {@code true} to enable, {@code false} to disable.
@@ -697,14 +785,13 @@ public abstract class TvInputService extends Service {
/**
* Requests to unblock the content according to the given rating.
- * <p>
- * The implementation should unblock the content.
+ *
+ * <p>The implementation should unblock the content.
* TV input service has responsibility to decide when/how the unblock expires
* while it can keep previously unblocked ratings in order not to ask a user
* to unblock whenever a content rating is changed.
* Therefore an unblocked rating can be valid for a channel, a program,
* or certain amount of time depending on the implementation.
- * </p>
*
* @param unblockedRating An unblocked content rating
*/
@@ -713,10 +800,10 @@ public abstract class TvInputService extends Service {
/**
* Selects a given track.
- * <p>
- * If this is done successfully, the implementation should call {@link #notifyTrackSelected}
- * to help applications maintain the up-to-date list of the selected tracks.
- * </p>
+ *
+ * <p>If this is done successfully, the implementation should call
+ * {@link #notifyTrackSelected} to help applications maintain the up-to-date list of the
+ * selected tracks.
*
* @param trackId The ID of the track to select. {@code null} means to unselect the current
* track for a given type.
@@ -726,7 +813,7 @@ public abstract class TvInputService extends Service {
* @return {@code true} if the track selection was successful, {@code false} otherwise.
* @see #notifyTrackSelected
*/
- public boolean onSelectTrack(int type, String trackId) {
+ public boolean onSelectTrack(int type, @Nullable String trackId) {
return false;
}
@@ -742,11 +829,11 @@ public abstract class TvInputService extends Service {
* @hide
*/
@SystemApi
- public void onAppPrivateCommand(String action, Bundle data) {
+ public void onAppPrivateCommand(@NonNull String action, Bundle data) {
}
/**
- * Called when an application requests to create an overlay view. Each session
+ * Called when the application requests to create an overlay view. Each session
* implementation can override this method and return its own view.
*
* @return a view attached to the overlay window
@@ -756,13 +843,111 @@ public abstract class TvInputService extends Service {
}
/**
+ * Called when the application requests to pause playback.
+ *
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackRate
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftPause() {
+ }
+
+ /**
+ * Called when the application requests to resume playback.
+ *
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackRate
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftResume() {
+ }
+
+ /**
+ * Called when the application requests to seek to a specified time position. Normally, the
+         * position is given within the range between the start and the current time, inclusive. The
+ * implementation is expected to seek to the nearest time position if the given position is
+ * not in the range.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSetPlaybackRate
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftSeekTo(long timeMs) {
+ }
+
+ /**
+ * Called when the application sets playback rate and audio mode.
+ *
+ * <p>Once a playback rate is set, the implementation should honor the value until a new
+ * tune request. Pause/resume/seek request does not reset the playback rate previously set.
+ *
+ * @param rate The ratio between desired playback rate and normal one.
+ * @param audioMode Audio playback mode. Must be one of the supported audio modes:
+ * <ul>
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_DEFAULT}
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_STRETCH}
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE}
+ * </ul>
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftGetStartPosition
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public void onTimeShiftSetPlaybackRate(float rate, int audioMode) {
+ }
+
+ /**
+ * Returns the start playback position for time shifting, in milliseconds since the epoch.
+ * Returns {@link TvInputManager#TIME_SHIFT_INVALID_TIME} if the position is unknown at the
+ * moment.
+ *
+ * <p>The start playback position of the time shifted program should be adjusted when the
+ * implementation cannot retain the whole recorded program due to some reason (e.g.
+ * limitation on storage space). It is the earliest possible time position that the user can
+         * seek to, thus failure to notify the change immediately might result in a bad experience
+ * where the application allows the user to seek to an invalid time position.
+ *
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackRate
+ * @see #onTimeShiftGetCurrentPosition
+ */
+ public long onTimeShiftGetStartPosition() {
+ return TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+
+ /**
+ * Returns the current playback position for time shifting, in milliseconds since the epoch.
+ * Returns {@link TvInputManager#TIME_SHIFT_INVALID_TIME} if the position is unknown at the
+ * moment.
+ *
+ * @see #onTimeShiftResume
+ * @see #onTimeShiftPause
+ * @see #onTimeShiftSeekTo
+ * @see #onTimeShiftSetPlaybackRate
+ * @see #onTimeShiftGetStartPosition
+ */
+ public long onTimeShiftGetCurrentPosition() {
+ return TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+
+ /**
* Default implementation of {@link android.view.KeyEvent.Callback#onKeyDown(int, KeyEvent)
* KeyEvent.Callback.onKeyDown()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept key down events before they are processed by the application.
- * If you return true, the application will not process the event itself. If you return
- * false, the normal application processing will occur as if the TV input had not seen the
- * event at all.
+ *
+ * <p>Override this to intercept key down events before they are processed by the
+ * application. If you return true, the application will not process the event itself. If
+ * you return false, the normal application processing will occur as if the TV input had not
+ * seen the event at all.
*
* @param keyCode The value in event.getKeyCode().
* @param event Description of the key event.
@@ -778,8 +963,8 @@ public abstract class TvInputService extends Service {
* Default implementation of
* {@link android.view.KeyEvent.Callback#onKeyLongPress(int, KeyEvent)
* KeyEvent.Callback.onKeyLongPress()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept key long press events before they are processed by the
+ *
+ * <p>Override this to intercept key long press events before they are processed by the
* application. If you return true, the application will not process the event itself. If
* you return false, the normal application processing will occur as if the TV input had not
* seen the event at all.
@@ -798,11 +983,11 @@ public abstract class TvInputService extends Service {
* Default implementation of
* {@link android.view.KeyEvent.Callback#onKeyMultiple(int, int, KeyEvent)
* KeyEvent.Callback.onKeyMultiple()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept special key multiple events before they are processed by the
- * application. If you return true, the application will not itself process the event. If
- * you return false, the normal application processing will occur as if the TV input had not
- * seen the event at all.
+ *
+ * <p>Override this to intercept special key multiple events before they are processed by
+ * the application. If you return true, the application will not itself process the event.
+ * If you return false, the normal application processing will occur as if the TV input had
+ * not seen the event at all.
*
* @param keyCode The value in event.getKeyCode().
* @param count The number of times the action was made.
@@ -818,9 +1003,9 @@ public abstract class TvInputService extends Service {
/**
* Default implementation of {@link android.view.KeyEvent.Callback#onKeyUp(int, KeyEvent)
* KeyEvent.Callback.onKeyUp()}: always returns false (doesn't handle the event).
- * <p>
- * Override this to intercept key up events before they are processed by the application. If
- * you return true, the application will not itself process the event. If you return false,
+ *
+ * <p>Override this to intercept key up events before they are processed by the application.
+ * If you return true, the application will not itself process the event. If you return false,
* the normal application processing will occur as if the TV input had not seen the event at
* all.
*
@@ -887,6 +1072,7 @@ public abstract class TvInputService extends Service {
// Removes the overlay view lastly so that any hanging on the main thread can be handled
// in {@link #scheduleOverlayViewCleanup}.
removeOverlayView(true);
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
}
/**
@@ -930,6 +1116,7 @@ public abstract class TvInputService extends Service {
* Calls {@link #onTune}.
*/
void tune(Uri channelUri, Bundle params) {
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
onTune(channelUri, params);
// TODO: Handle failure.
}
@@ -967,7 +1154,7 @@ public abstract class TvInputService extends Service {
* Creates an overlay view. This calls {@link #onCreateOverlayView} to get a view to attach
* to the overlay window.
*
- * @param windowToken A window token of an application.
+ * @param windowToken A window token of the application.
* @param frame A position of the overlay view.
*/
void createOverlayView(IBinder windowToken, Rect frame) {
@@ -1059,6 +1246,49 @@ public abstract class TvInputService extends Service {
}
/**
+ * Calls {@link #onTimeShiftPause}.
+ */
+ void timeShiftPause() {
+ onTimeShiftPause();
+ }
+
+ /**
+ * Calls {@link #onTimeShiftResume}.
+ */
+ void timeShiftResume() {
+ onTimeShiftResume();
+ }
+
+ /**
+ * Calls {@link #onTimeShiftSeekTo}.
+ */
+ void timeShiftSeekTo(long timeMs) {
+ onTimeShiftSeekTo(timeMs);
+ }
+
+ /**
+ * Calls {@link #onTimeShiftSetPlaybackRate}.
+ */
+ void timeShiftSetPlaybackRate(float rate, int audioMode) {
+ onTimeShiftSetPlaybackRate(rate, audioMode);
+ }
+
+ /**
+ * Enable/disable position tracking.
+ *
+ * @param enable {@code true} to enable tracking, {@code false} otherwise.
+ */
+ void timeShiftEnablePositionTracking(boolean enable) {
+ if (enable) {
+ mHandler.post(mTimeShiftPositionTrackingRunnable);
+ } else {
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
+ mStartPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+ }
+
+ /**
* Schedules a task which checks whether the overlay view is detached and kills the process
* if it is not. Note that this method is expected to be called in a non-main thread.
*/
@@ -1077,12 +1307,19 @@ public abstract class TvInputService extends Service {
int dispatchInputEvent(InputEvent event, InputEventReceiver receiver) {
if (DEBUG) Log.d(TAG, "dispatchInputEvent(" + event + ")");
boolean isNavigationKey = false;
+ boolean skipDispatchToOverlayView = false;
if (event instanceof KeyEvent) {
KeyEvent keyEvent = (KeyEvent) event;
- isNavigationKey = isNavigationKey(keyEvent.getKeyCode());
if (keyEvent.dispatch(this, mDispatcherState, this)) {
return TvInputManager.Session.DISPATCH_HANDLED;
}
+ isNavigationKey = isNavigationKey(keyEvent.getKeyCode());
+ // When media keys and KEYCODE_MEDIA_AUDIO_TRACK are dispatched to ViewRootImpl,
+ // ViewRootImpl always consumes the keys. In this case, the application loses
+ // a chance to handle media keys. Therefore, media keys are not dispatched to
+ // ViewRootImpl.
+ skipDispatchToOverlayView = KeyEvent.isMediaKey(keyEvent.getKeyCode())
+ || keyEvent.getKeyCode() == KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK;
} else if (event instanceof MotionEvent) {
MotionEvent motionEvent = (MotionEvent) event;
final int source = motionEvent.getSource();
@@ -1100,7 +1337,8 @@ public abstract class TvInputService extends Service {
}
}
}
- if (mOverlayViewContainer == null || !mOverlayViewContainer.isAttachedToWindow()) {
+ if (mOverlayViewContainer == null || !mOverlayViewContainer.isAttachedToWindow()
+ || skipDispatchToOverlayView) {
return TvInputManager.Session.DISPATCH_NOT_HANDLED;
}
if (!mOverlayViewContainer.hasWindowFocus()) {
@@ -1146,12 +1384,31 @@ public abstract class TvInputService extends Service {
}
}
+ private final class TimeShiftPositionTrackingRunnable implements Runnable {
+ @Override
+ public void run() {
+ long startPositionMs = onTimeShiftGetStartPosition();
+ if (mStartPositionMs != startPositionMs) {
+ mStartPositionMs = startPositionMs;
+ notifyTimeShiftStartPositionChanged(startPositionMs);
+ }
+ long currentPositionMs = onTimeShiftGetCurrentPosition();
+ if (mCurrentPositionMs != currentPositionMs) {
+ mCurrentPositionMs = currentPositionMs;
+ notifyTimeShiftCurrentPositionChanged(currentPositionMs);
+ }
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
+ mHandler.postDelayed(mTimeShiftPositionTrackingRunnable,
+ POSITION_UPDATE_INTERVAL_MS);
+ }
+ }
+
private final class OverlayViewCleanUpTask extends AsyncTask<View, Void, Void> {
@Override
protected Void doInBackground(View... views) {
View overlayViewParent = views[0];
try {
- Thread.sleep(DETACH_OVERLAY_VIEW_TIMEOUT);
+ Thread.sleep(DETACH_OVERLAY_VIEW_TIMEOUT_MS);
} catch (InterruptedException e) {
return null;
}
@@ -1171,8 +1428,8 @@ public abstract class TvInputService extends Service {
/**
* Base class for a TV input session which represents an external device connected to a
* hardware TV input.
- * <p>
- * This class is for an input which provides channels for the external set-top box to the
+ *
+ * <p>This class is for an input which provides channels for the external set-top box to the
* application. Once a TV input returns an implementation of this class on
* {@link #onCreateSession(String)}, the framework will create a separate session for
* a hardware TV Input (e.g. HDMI 1) and forward the application's surface to the session so
@@ -1180,9 +1437,10 @@ public abstract class TvInputService extends Service {
* this TV input. The implementation of this class is expected to change the channel of the
* external set-top box via a proprietary protocol when {@link HardwareSession#onTune(Uri)} is
* requested by the application.
- * </p><p>
- * Note that this class is not for inputs for internal hardware like built-in tuner and HDMI 1.
- * </p>
+ *
+ * <p>Note that this class is not for inputs for internal hardware like built-in tuner and HDMI
+ * 1.
+ *
* @see #onCreateSession(String)
*/
public abstract static class HardwareSession extends Session {
@@ -1203,12 +1461,11 @@ public abstract class TvInputService extends Service {
/**
* Returns the hardware TV input ID the external device is connected to.
- * <p>
- * TV input is expected to provide {@link android.R.attr#setupActivity} so that
+ *
+ * <p>TV input is expected to provide {@link android.R.attr#setupActivity} so that
* the application can launch it before using this TV input. The setup activity may let
* the user select the hardware TV input to which the external device is connected. The ID
* of the selected one should be stored in the TV input so that it can be returned here.
- * </p>
*/
public abstract String getHardwareInputId();
@@ -1223,6 +1480,8 @@ public abstract class TvInputService extends Service {
args.arg2 = mProxySession;
args.arg3 = mProxySessionCallback;
args.arg4 = session.getToken();
+ session.tune(TvContract.buildChannelUriForPassthroughInput(
+ getHardwareInputId()));
} else {
args.arg1 = null;
args.arg2 = null;
@@ -1232,7 +1491,6 @@ public abstract class TvInputService extends Service {
}
mServiceHandler.obtainMessage(ServiceHandler.DO_NOTIFY_SESSION_CREATED, args)
.sendToTarget();
- session.tune(TvContract.buildChannelUriForPassthroughInput(getHardwareInputId()));
}
@Override
@@ -1277,6 +1535,7 @@ public abstract class TvInputService extends Service {
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
*/
public void onHardwareVideoUnavailable(int reason) { }
@@ -1317,7 +1576,7 @@ public abstract class TvInputService extends Service {
try {
mCallbacks.getBroadcastItem(i).addHardwareTvInput(deviceId, inputInfo);
} catch (RemoteException e) {
- Log.e(TAG, "Error while broadcasting.", e);
+ Log.e(TAG, "error in broadcastAddHardwareTvInput", e);
}
}
mCallbacks.finishBroadcast();
@@ -1329,7 +1588,7 @@ public abstract class TvInputService extends Service {
try {
mCallbacks.getBroadcastItem(i).addHdmiTvInput(id, inputInfo);
} catch (RemoteException e) {
- Log.e(TAG, "Error while broadcasting.", e);
+ Log.e(TAG, "error in broadcastAddHdmiTvInput", e);
}
}
mCallbacks.finishBroadcast();
@@ -1341,7 +1600,7 @@ public abstract class TvInputService extends Service {
try {
mCallbacks.getBroadcastItem(i).removeTvInput(inputId);
} catch (RemoteException e) {
- Log.e(TAG, "Error while broadcasting.", e);
+ Log.e(TAG, "error in broadcastRemoveTvInput", e);
}
}
mCallbacks.finishBroadcast();
@@ -1362,7 +1621,7 @@ public abstract class TvInputService extends Service {
// Failed to create a session.
cb.onSessionCreated(null, null);
} catch (RemoteException e) {
- Log.e(TAG, "error in onSessionCreated");
+ Log.e(TAG, "error in onSessionCreated", e);
}
return;
}
@@ -1383,7 +1642,7 @@ public abstract class TvInputService extends Service {
try {
cb.onSessionCreated(null, null);
} catch (RemoteException e) {
- Log.e(TAG, "error in onSessionCreated");
+ Log.e(TAG, "error in onSessionCreated", e);
}
return;
}
@@ -1414,7 +1673,7 @@ public abstract class TvInputService extends Service {
try {
cb.onSessionCreated(stub, hardwareSessionToken);
} catch (RemoteException e) {
- Log.e(TAG, "error in onSessionCreated");
+ Log.e(TAG, "error in onSessionCreated", e);
}
if (sessionImpl != null) {
sessionImpl.initialize(cb);
diff --git a/media/java/android/media/tv/TvTrackInfo.java b/media/java/android/media/tv/TvTrackInfo.java
index 0284171..2c956e9 100644
--- a/media/java/android/media/tv/TvTrackInfo.java
+++ b/media/java/android/media/tv/TvTrackInfo.java
@@ -16,10 +16,13 @@
package android.media.tv;
+import android.annotation.NonNull;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
+import com.android.internal.util.Preconditions;
+
/**
* Encapsulates the format of tracks played in {@link TvInputService}.
*/
@@ -48,11 +51,12 @@ public final class TvTrackInfo implements Parcelable {
private final int mVideoWidth;
private final int mVideoHeight;
private final float mVideoFrameRate;
+ private final float mVideoPixelAspectRatio;
private final Bundle mExtra;
private TvTrackInfo(int type, String id, String language, String description,
int audioChannelCount, int audioSampleRate, int videoWidth, int videoHeight,
- float videoFrameRate, Bundle extra) {
+ float videoFrameRate, float videoPixelAspectRatio, Bundle extra) {
mType = type;
mId = id;
mLanguage = language;
@@ -62,6 +66,7 @@ public final class TvTrackInfo implements Parcelable {
mVideoWidth = videoWidth;
mVideoHeight = videoHeight;
mVideoFrameRate = videoFrameRate;
+ mVideoPixelAspectRatio = videoPixelAspectRatio;
mExtra = extra;
}
@@ -75,6 +80,7 @@ public final class TvTrackInfo implements Parcelable {
mVideoWidth = in.readInt();
mVideoHeight = in.readInt();
mVideoFrameRate = in.readFloat();
+ mVideoPixelAspectRatio = in.readFloat();
mExtra = in.readBundle();
}
@@ -162,6 +168,17 @@ public final class TvTrackInfo implements Parcelable {
}
/**
+ * Returns the pixel aspect ratio (the ratio of a pixel's width to its height) of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ */
+ public final float getVideoPixelAspectRatio() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoPixelAspectRatio;
+ }
+
+ /**
* Returns the extra information about the current track.
*/
public final Bundle getExtra() {
@@ -190,6 +207,7 @@ public final class TvTrackInfo implements Parcelable {
dest.writeInt(mVideoWidth);
dest.writeInt(mVideoHeight);
dest.writeFloat(mVideoFrameRate);
+ dest.writeFloat(mVideoPixelAspectRatio);
dest.writeBundle(mExtra);
}
@@ -219,6 +237,7 @@ public final class TvTrackInfo implements Parcelable {
private int mVideoWidth;
private int mVideoHeight;
private float mVideoFrameRate;
+ private float mVideoPixelAspectRatio = 1.0f;
private Bundle mExtra;
/**
@@ -229,15 +248,13 @@ public final class TvTrackInfo implements Parcelable {
* @param id The ID of the track that uniquely identifies the current track among all the
* other tracks in the same TV program.
*/
- public Builder(int type, String id) {
+ public Builder(int type, @NonNull String id) {
if (type != TYPE_AUDIO
&& type != TYPE_VIDEO
&& type != TYPE_SUBTITLE) {
throw new IllegalArgumentException("Unknown type: " + type);
}
- if (id == null) {
- throw new IllegalArgumentException("id cannot be null");
- }
+ Preconditions.checkNotNull(id);
mType = type;
mId = id;
}
@@ -332,6 +349,25 @@ public final class TvTrackInfo implements Parcelable {
}
/**
+ * Sets the pixel aspect ratio (the ratio of a pixel's width to its height) of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ *
+ * <p>This is needed for applications to be able to scale the video properly for some video
+ * formats such as 720x576 4:3 and 720x576 16:9 where pixels are not square. By default,
+ * applications assume the value of 1.0 (square pixels), so it is not necessary to set the
+ * pixel aspect ratio for most video formats.
+ *
+ * @param videoPixelAspectRatio The pixel aspect ratio of the video.
+ */
+ public final Builder setVideoPixelAspectRatio(float videoPixelAspectRatio) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoPixelAspectRatio = videoPixelAspectRatio;
+ return this;
+ }
+
+ /**
* Sets the extra information about the current track.
*
* @param extra The extra information.
@@ -348,7 +384,8 @@ public final class TvTrackInfo implements Parcelable {
*/
public TvTrackInfo build() {
return new TvTrackInfo(mType, mId, mLanguage, mDescription, mAudioChannelCount,
- mAudioSampleRate, mVideoWidth, mVideoHeight, mVideoFrameRate, mExtra);
+ mAudioSampleRate, mVideoWidth, mVideoHeight, mVideoFrameRate,
+ mVideoPixelAspectRatio, mExtra);
}
}
}
diff --git a/media/java/android/media/tv/TvView.java b/media/java/android/media/tv/TvView.java
index 6fc1b82..7e64b17 100644
--- a/media/java/android/media/tv/TvView.java
+++ b/media/java/android/media/tv/TvView.java
@@ -16,6 +16,8 @@
package android.media.tv;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.content.Context;
import android.graphics.Canvas;
@@ -31,6 +33,7 @@ import android.os.Handler;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
+import android.util.Pair;
import android.view.InputEvent;
import android.view.KeyEvent;
import android.view.MotionEvent;
@@ -42,21 +45,22 @@ import android.view.ViewGroup;
import android.view.ViewRootImpl;
import java.lang.ref.WeakReference;
+import java.util.ArrayDeque;
import java.util.List;
+import java.util.Queue;
/**
* Displays TV contents. The TvView class provides a high level interface for applications to show
* TV programs from various TV sources that implement {@link TvInputService}. (Note that the list of
* TV inputs available on the system can be obtained by calling
* {@link TvInputManager#getTvInputList() TvInputManager.getTvInputList()}.)
- * <p>
- * Once the application supplies the URI for a specific TV channel to {@link #tune(String, Uri)}
+ *
+ * <p>Once the application supplies the URI for a specific TV channel to {@link #tune(String, Uri)}
* method, it takes care of underlying service binding (and unbinding if the current TvView is
* already bound to a service) and automatically allocates/deallocates resources needed. In addition
* to a few essential methods to control how the contents are presented, it also provides a way to
* dispatch input events to the connected TvInputService in order to enable custom key actions for
* the TV input.
- * </p>
*/
public class TvView extends ViewGroup {
private static final String TAG = "TvView";
@@ -66,10 +70,6 @@ public class TvView extends ViewGroup {
private static final int ZORDER_MEDIA_OVERLAY = 1;
private static final int ZORDER_ON_TOP = 2;
- private static final int CAPTION_DEFAULT = 0;
- private static final int CAPTION_ENABLED = 1;
- private static final int CAPTION_DISABLED = 2;
-
private static final WeakReference<TvView> NULL_TV_VIEW = new WeakReference<>(null);
private static final Object sMainTvViewLock = new Object();
@@ -85,11 +85,9 @@ public class TvView extends ViewGroup {
private MySessionCallback mSessionCallback;
private TvInputCallback mCallback;
private OnUnhandledInputEventListener mOnUnhandledInputEventListener;
- private boolean mHasStreamVolume;
- private float mStreamVolume;
- private int mCaptionEnabled;
- private String mAppPrivateCommandAction;
- private Bundle mAppPrivateCommandData;
+ private Float mStreamVolume;
+ private Boolean mCaptionEnabled;
+ private final Queue<Pair<String, Bundle>> mPendingAppPrivateCommands = new ArrayDeque<>();
private boolean mSurfaceChanged;
private int mSurfaceFormat;
@@ -103,6 +101,7 @@ public class TvView extends ViewGroup {
private int mSurfaceViewRight;
private int mSurfaceViewTop;
private int mSurfaceViewBottom;
+ private TimeShiftPositionCallback mTimeShiftPositionCallback;
private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
@Override
@@ -173,27 +172,26 @@ public class TvView extends ViewGroup {
/**
* Sets the callback to be invoked when an event is dispatched to this TvView.
*
- * @param callback The callback to receive events. A value of {@code null} removes any existing
- * callbacks.
+ * @param callback The callback to receive events. A value of {@code null} removes the existing
+ * callback.
*/
- public void setCallback(TvInputCallback callback) {
+ public void setCallback(@Nullable TvInputCallback callback) {
mCallback = callback;
}
/**
* Sets this as the main {@link TvView}.
- * <p>
- * The main {@link TvView} is a {@link TvView} whose corresponding TV input determines the
+ *
+ * <p>The main {@link TvView} is a {@link TvView} whose corresponding TV input determines the
* HDMI-CEC active source device. For an HDMI port input, one of source devices that is
* connected to that HDMI port becomes the active source. For an HDMI-CEC logical device input,
* the corresponding HDMI-CEC logical device becomes the active source. For any non-HDMI input
* (including the tuner, composite, S-Video, etc.), the internal device (= TV itself) becomes
* the active source.
- * </p><p>
- * First tuned {@link TvView} becomes main automatically, and keeps to be main until either
+ *
+     * <p>First tuned {@link TvView} becomes main automatically, and remains main until either
* {@link #reset} is called for the main {@link TvView} or {@link #setMain} is called for other
* {@link TvView}.
- * </p>
* @hide
*/
@SystemApi
@@ -252,13 +250,16 @@ public class TvView extends ViewGroup {
}
/**
- * Sets the relative stream volume of this session to handle a change of audio focus.
+ * Sets the relative stream volume of this TvView.
*
- * @param volume A volume value between 0.0f to 1.0f.
+ * <p>This method is primarily used to handle audio focus changes or mute a specific TvView when
+ * multiple views are displayed. If the method has not yet been called, the TvView assumes the
+ * default value of {@code 1.0f}.
+ *
+     * @param volume A volume value between {@code 0.0f} and {@code 1.0f}.
*/
public void setStreamVolume(float volume) {
if (DEBUG) Log.d(TAG, "setStreamVolume(" + volume + ")");
- mHasStreamVolume = true;
mStreamVolume = volume;
if (mSession == null) {
// Volume will be set once the connection has been made.
@@ -273,7 +274,7 @@ public class TvView extends ViewGroup {
* @param inputId The ID of TV input which will play the given channel.
* @param channelUri The URI of a channel.
*/
- public void tune(String inputId, Uri channelUri) {
+ public void tune(@NonNull String inputId, Uri channelUri) {
tune(inputId, channelUri, null);
}
@@ -321,8 +322,8 @@ public class TvView extends ViewGroup {
/**
* Resets this TvView.
- * <p>
- * This method is primarily used to un-tune the current TvView.
+ *
+ * <p>This method is primarily used to un-tune the current TvView.
*/
public void reset() {
if (DEBUG) Log.d(TAG, "reset()");
@@ -343,9 +344,8 @@ public class TvView extends ViewGroup {
/**
* Requests to unblock TV content according to the given rating.
- * <p>
- * This notifies TV input that blocked content is now OK to play.
- * </p>
+ *
+ * <p>This notifies TV input that blocked content is now OK to play.
*
* @param unblockedRating A TvContentRating to unblock.
* @see TvInputService.Session#notifyContentBlocked(TvContentRating)
@@ -360,13 +360,14 @@ public class TvView extends ViewGroup {
/**
* Enables or disables the caption in this TvView.
- * <p>
- * Note that this method does not take any effect unless the current TvView is tuned.
+ *
+ * <p>Note that this method does not take any effect unless the current TvView is tuned.
*
* @param enabled {@code true} to enable, {@code false} to disable.
*/
public void setCaptionEnabled(boolean enabled) {
- mCaptionEnabled = enabled ? CAPTION_ENABLED : CAPTION_DISABLED;
+ if (DEBUG) Log.d(TAG, "setCaptionEnabled(" + enabled + ")");
+ mCaptionEnabled = enabled;
if (mSession != null) {
mSession.setCaptionEnabled(enabled);
}
@@ -420,6 +421,72 @@ public class TvView extends ViewGroup {
}
/**
+ * Pauses playback. No-op if it is already paused. Call {@link #timeShiftResume} to resume.
+ */
+ public void timeShiftPause() {
+ if (mSession != null) {
+ mSession.timeShiftPause();
+ }
+ }
+
+ /**
+ * Resumes playback. No-op if it is already resumed. Call {@link #timeShiftPause} to pause.
+ */
+ public void timeShiftResume() {
+ if (mSession != null) {
+ mSession.timeShiftResume();
+ }
+ }
+
+ /**
+ * Seeks to a specified time position. {@code timeMs} must be equal to or greater than the start
+ * position returned by {@link TimeShiftPositionCallback#onTimeShiftStartPositionChanged} and
+ * equal to or less than the current time.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ */
+ public void timeShiftSeekTo(long timeMs) {
+ if (mSession != null) {
+ mSession.timeShiftSeekTo(timeMs);
+ }
+ }
+
+ /**
+ * Sets playback rate and audio mode.
+ *
+ * @param rate The ratio between desired playback rate and normal one.
+ * @param audioMode Audio playback mode. Must be one of the supported audio modes:
+ * <ul>
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_DEFAULT}
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_STRETCH}
+ * <li> {@link android.media.MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE}
+ * </ul>
+ */
+ public void timeShiftSetPlaybackRate(float rate, int audioMode) {
+ if (mSession != null) {
+ mSession.timeShiftSetPlaybackRate(rate, audioMode);
+ }
+ }
+
+ /**
+ * Sets the callback to be invoked when the time shift position is changed.
+ *
+ * @param callback The callback to receive time shift position changes. A value of {@code null}
+ * removes the existing callback.
+ */
+ public void setTimeShiftPositionCallback(@Nullable TimeShiftPositionCallback callback) {
+ mTimeShiftPositionCallback = callback;
+ ensurePositionTracking();
+ }
+
+ private void ensurePositionTracking() {
+ if (mSession == null) {
+ return;
+ }
+ mSession.timeShiftEnablePositionTracking(mTimeShiftPositionCallback != null);
+ }
+
+ /**
* Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
* TvInputService.Session.appPrivateCommand()} on the current TvView.
*
@@ -430,26 +497,23 @@ public class TvView extends ViewGroup {
* @hide
*/
@SystemApi
- public void sendAppPrivateCommand(String action, Bundle data) {
+ public void sendAppPrivateCommand(@NonNull String action, Bundle data) {
if (TextUtils.isEmpty(action)) {
throw new IllegalArgumentException("action cannot be null or an empty string");
}
if (mSession != null) {
mSession.sendAppPrivateCommand(action, data);
} else {
- Log.w(TAG, "sendAppPrivateCommand - session not created (action " + action + " cached)");
- if (mAppPrivateCommandAction != null) {
- Log.w(TAG, "previous cached action " + action + " removed");
- }
- mAppPrivateCommandAction = action;
- mAppPrivateCommandData = data;
+ Log.w(TAG, "sendAppPrivateCommand - session not yet created (action \"" + action
+ + "\" pending)");
+ mPendingAppPrivateCommands.add(Pair.create(action, data));
}
}
/**
* Dispatches an unhandled input event to the next receiver.
- * <p>
- * Except system keys, TvView always consumes input events in the normal flow. This is called
+ *
+ * <p>Except system keys, TvView always consumes input events in the normal flow. This is called
* asynchronously from where the event is dispatched. It gives the host application a chance to
* dispatch the unhandled input events.
*
@@ -664,8 +728,7 @@ public class TvView extends ViewGroup {
}
private void release() {
- mAppPrivateCommandAction = null;
- mAppPrivateCommandData = null;
+ mPendingAppPrivateCommands.clear();
setSessionSurface(null);
removeSessionOverlayView();
@@ -729,6 +792,37 @@ public class TvView extends ViewGroup {
}
/**
+ * Callback used to receive time shift position changes.
+ */
+ public abstract static class TimeShiftPositionCallback {
+
+ /**
+ * This is called when the start playback position is changed.
+ *
+ * <p>The start playback position of the time shifted program can be adjusted by the TV
+ * input when it cannot retain the whole recorded program for some reason (e.g., a
+ * limitation on storage space). The application should not allow the user to seek to a
+ * position earlier than the start position.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param timeMs The start playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftStartPositionChanged(String inputId, long timeMs) {
+ }
+
+ /**
+ * This is called when the current playback position is changed.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param timeMs The current playback position of the time shifted program, in milliseconds
+ * since the epoch.
+ */
+ public void onTimeShiftCurrentPositionChanged(String inputId, long timeMs) {
+ }
+ }
+
+ /**
* Callback used to receive various status updates on the {@link TvView}.
*/
public abstract static class TvInputCallback {
@@ -811,6 +905,7 @@ public class TvView extends ViewGroup {
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
* <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
* </ul>
*/
public void onVideoUnavailable(String inputId, int reason) {
@@ -838,6 +933,7 @@ public class TvView extends ViewGroup {
/**
* This is invoked when a custom event from the bound TV input is sent to this view.
*
+ * @param inputId The ID of the TV input bound to this view.
* @param eventType The type of the event.
* @param eventArgs Optional arguments of the event.
* @hide
@@ -845,6 +941,20 @@ public class TvView extends ViewGroup {
@SystemApi
public void onEvent(String inputId, String eventType, Bundle eventArgs) {
}
+
+ /**
+ * This is called when the time shift status is changed.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param status The current time shift status. Should be one of the following.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void onTimeShiftStatusChanged(String inputId, int status) {
+ }
}
/**
@@ -853,8 +963,8 @@ public class TvView extends ViewGroup {
public interface OnUnhandledInputEventListener {
/**
* Called when an input event was not handled by the bound TV input.
- * <p>
- * This is called asynchronously from where the event is dispatched. It gives the host
+ *
+ * <p>This is called asynchronously from where the event is dispatched. It gives the host
* application a chance to handle the unhandled input events.
*
* @param event The input event.
@@ -890,6 +1000,12 @@ public class TvView extends ViewGroup {
}
mSession = session;
if (session != null) {
+ // Sends the pending app private commands first.
+ for (Pair<String, Bundle> command : mPendingAppPrivateCommands) {
+ mSession.sendAppPrivateCommand(command.first, command.second);
+ }
+ mPendingAppPrivateCommands.clear();
+
synchronized (sMainTvViewLock) {
if (hasWindowFocus() && TvView.this == sMainTvView.get()) {
mSession.setMain();
@@ -905,19 +1021,14 @@ public class TvView extends ViewGroup {
}
}
createSessionOverlayView();
- if (mCaptionEnabled != CAPTION_DEFAULT) {
- mSession.setCaptionEnabled(mCaptionEnabled == CAPTION_ENABLED);
- }
- mSession.tune(mChannelUri, mTuneParams);
- if (mHasStreamVolume) {
+ if (mStreamVolume != null) {
mSession.setStreamVolume(mStreamVolume);
}
- if (mAppPrivateCommandAction != null) {
- mSession.sendAppPrivateCommand(
- mAppPrivateCommandAction, mAppPrivateCommandData);
- mAppPrivateCommandAction = null;
- mAppPrivateCommandData = null;
+ if (mCaptionEnabled != null) {
+ mSession.setCaptionEnabled(mCaptionEnabled);
}
+ mSession.tune(mChannelUri, mTuneParams);
+ ensurePositionTracking();
} else {
mSessionCallback = null;
if (mCallback != null) {
@@ -1087,5 +1198,47 @@ public class TvView extends ViewGroup {
mCallback.onEvent(mInputId, eventType, eventArgs);
}
}
+
+ @Override
+ public void onTimeShiftStatusChanged(Session session, int status) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftStatusChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftStatusChanged - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onTimeShiftStatusChanged(mInputId, status);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStartPositionChanged(Session session, long timeMs) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftStartPositionChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftStartPositionChanged - session not created");
+ return;
+ }
+ if (mTimeShiftPositionCallback != null) {
+ mTimeShiftPositionCallback.onTimeShiftStartPositionChanged(mInputId, timeMs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(Session session, long timeMs) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftCurrentPositionChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftCurrentPositionChanged - session not created");
+ return;
+ }
+ if (mTimeShiftPositionCallback != null) {
+ mTimeShiftPositionCallback.onTimeShiftCurrentPositionChanged(mInputId, timeMs);
+ }
+ }
}
}
diff --git a/media/java/android/mtp/MtpStorage.java b/media/java/android/mtp/MtpStorage.java
index e20eabc..3641ff5 100644
--- a/media/java/android/mtp/MtpStorage.java
+++ b/media/java/android/mtp/MtpStorage.java
@@ -38,7 +38,7 @@ public class MtpStorage {
public MtpStorage(StorageVolume volume, Context context) {
mStorageId = volume.getStorageId();
mPath = volume.getPath();
- mDescription = context.getResources().getString(volume.getDescriptionId());
+ mDescription = volume.getDescription(context);
mReserveSpace = volume.getMtpReserveSpace() * 1024L * 1024L;
mRemovable = volume.isRemovable();
mMaxFileSize = volume.getMaxFileSize();
@@ -59,7 +59,7 @@ public class MtpStorage {
*
* @return the storage ID
*/
- public static int getStorageId(int index) {
+ public static int getStorageIdForIndex(int index) {
// storage ID is 0x00010001 for primary storage,
// then 0x00020001, 0x00030001, etc. for secondary storages
return ((index + 1) << 16) + 1;
diff --git a/media/jni/Android.mk b/media/jni/Android.mk
index 4ebbe26..dbb53b4 100644
--- a/media/jni/Android.mk
+++ b/media/jni/Android.mk
@@ -2,21 +2,25 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
+ android_media_AmrInputStream.cpp \
+ android_media_ImageWriter.cpp \
android_media_ImageReader.cpp \
android_media_MediaCrypto.cpp \
android_media_MediaCodec.cpp \
android_media_MediaCodecList.cpp \
+ android_media_MediaDataSource.cpp \
android_media_MediaDrm.cpp \
android_media_MediaExtractor.cpp \
android_media_MediaHTTPConnection.cpp \
+ android_media_MediaMetadataRetriever.cpp \
android_media_MediaMuxer.cpp \
android_media_MediaPlayer.cpp \
+ android_media_MediaProfiles.cpp \
android_media_MediaRecorder.cpp \
android_media_MediaScanner.cpp \
- android_media_MediaMetadataRetriever.cpp \
+ android_media_MediaSync.cpp \
android_media_ResampleInputStream.cpp \
- android_media_MediaProfiles.cpp \
- android_media_AmrInputStream.cpp \
+ android_media_SyncSettings.cpp \
android_media_Utils.cpp \
android_mtp_MtpDatabase.cpp \
android_mtp_MtpDevice.cpp \
@@ -40,7 +44,7 @@ LOCAL_SHARED_LIBRARIES := \
libusbhost \
libjhead \
libexif \
- libstagefright_amrnb_common \
+ libstagefright_amrnb_common
LOCAL_REQUIRED_MODULES := \
libjhead_jni
@@ -52,6 +56,7 @@ LOCAL_C_INCLUDES += \
external/libexif/ \
external/tremor/Tremor \
frameworks/base/core/jni \
+ frameworks/base/libs/hwui \
frameworks/av/media/libmedia \
frameworks/av/media/libstagefright \
frameworks/av/media/libstagefright/codecs/amrnb/enc/src \
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index b247493..043e20b 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -24,6 +24,7 @@
#include <cstdio>
#include <gui/CpuConsumer.h>
+#include <gui/BufferItemConsumer.h>
#include <gui/Surface.h>
#include <camera3.h>
@@ -39,7 +40,7 @@
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
#define ANDROID_MEDIA_IMAGEREADER_CTX_JNI_ID "mNativeContext"
-#define ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID "mLockedBuffer"
+#define ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID "mNativeBuffer"
#define ANDROID_MEDIA_SURFACEIMAGE_TS_JNI_ID "mTimestamp"
// ----------------------------------------------------------------------------
@@ -62,7 +63,7 @@ static struct {
} gImageReaderClassInfo;
static struct {
- jfieldID mLockedBuffer;
+ jfieldID mNativeBuffer;
jfieldID mTimestamp;
} gSurfaceImageClassInfo;
@@ -73,7 +74,7 @@ static struct {
// ----------------------------------------------------------------------------
-class JNIImageReaderContext : public CpuConsumer::FrameAvailableListener
+class JNIImageReaderContext : public ConsumerBase::FrameAvailableListener
{
public:
JNIImageReaderContext(JNIEnv* env, jobject weakThiz, jclass clazz, int maxImages);
@@ -83,12 +84,19 @@ public:
virtual void onFrameAvailable(const BufferItem& item);
CpuConsumer::LockedBuffer* getLockedBuffer();
-
void returnLockedBuffer(CpuConsumer::LockedBuffer* buffer);
+ BufferItem* getOpaqueBuffer();
+ void returnOpaqueBuffer(BufferItem* buffer);
+
void setCpuConsumer(const sp<CpuConsumer>& consumer) { mConsumer = consumer; }
CpuConsumer* getCpuConsumer() { return mConsumer.get(); }
+ void setOpaqueConsumer(const sp<BufferItemConsumer>& consumer) { mOpaqueConsumer = consumer; }
+ BufferItemConsumer* getOpaqueConsumer() { return mOpaqueConsumer.get(); }
+ // This is the only opaque format exposed in the ImageFormat public API.
+ bool isOpaque() { return mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; }
+
void setProducer(const sp<IGraphicBufferProducer>& producer) { mProducer = producer; }
IGraphicBufferProducer* getProducer() { return mProducer.get(); }
@@ -109,7 +117,9 @@ private:
static void detachJNI();
List<CpuConsumer::LockedBuffer*> mBuffers;
+ List<BufferItem*> mOpaqueBuffers;
sp<CpuConsumer> mConsumer;
+ sp<BufferItemConsumer> mOpaqueConsumer;
sp<IGraphicBufferProducer> mProducer;
jobject mWeakThiz;
jclass mClazz;
@@ -125,7 +135,9 @@ JNIImageReaderContext::JNIImageReaderContext(JNIEnv* env,
mClazz((jclass)env->NewGlobalRef(clazz)) {
for (int i = 0; i < maxImages; i++) {
CpuConsumer::LockedBuffer *buffer = new CpuConsumer::LockedBuffer;
+ BufferItem* opaqueBuffer = new BufferItem;
mBuffers.push_back(buffer);
+ mOpaqueBuffers.push_back(opaqueBuffer);
}
}
@@ -169,6 +181,21 @@ void JNIImageReaderContext::returnLockedBuffer(CpuConsumer::LockedBuffer* buffer
mBuffers.push_back(buffer);
}
+BufferItem* JNIImageReaderContext::getOpaqueBuffer() {
+ if (mOpaqueBuffers.empty()) {
+ return NULL;
+ }
+ // Return an opaque buffer pointer and remove it from the list
+ List<BufferItem*>::iterator it = mOpaqueBuffers.begin();
+ BufferItem* buffer = *it;
+ mOpaqueBuffers.erase(it);
+ return buffer;
+}
+
+void JNIImageReaderContext::returnOpaqueBuffer(BufferItem* buffer) {
+ mOpaqueBuffers.push_back(buffer);
+}
+
JNIImageReaderContext::~JNIImageReaderContext() {
bool needsDetach = false;
JNIEnv* env = getJNIEnv(&needsDetach);
@@ -187,8 +214,20 @@ JNIImageReaderContext::~JNIImageReaderContext() {
it != mBuffers.end(); it++) {
delete *it;
}
+
+ // Delete opaque buffers
+ for (List<BufferItem *>::iterator it = mOpaqueBuffers.begin();
+ it != mOpaqueBuffers.end(); it++) {
+ delete *it;
+ }
+
mBuffers.clear();
- mConsumer.clear();
+ if (mConsumer != 0) {
+ mConsumer.clear();
+ }
+ if (mOpaqueConsumer != 0) {
+ mOpaqueConsumer.clear();
+ }
}
void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
@@ -210,6 +249,11 @@ void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
extern "C" {
+static bool isFormatOpaque(int format) {
+ // Only treat IMPLEMENTATION_DEFINED as an opaque format for now.
+ return format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+}
+
static JNIImageReaderContext* ImageReader_getContext(JNIEnv* env, jobject thiz)
{
JNIImageReaderContext *ctx;
@@ -226,6 +270,13 @@ static CpuConsumer* ImageReader_getCpuConsumer(JNIEnv* env, jobject thiz)
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
+
+ if (ctx->isOpaque()) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque ImageReader doesn't support this method");
+ return NULL;
+ }
+
return ctx->getCpuConsumer();
}
@@ -237,6 +288,7 @@ static IGraphicBufferProducer* ImageReader_getProducer(JNIEnv* env, jobject thiz
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
+
return ctx->getProducer();
}
@@ -258,13 +310,19 @@ static void ImageReader_setNativeContext(JNIEnv* env,
static CpuConsumer::LockedBuffer* Image_getLockedBuffer(JNIEnv* env, jobject image)
{
return reinterpret_cast<CpuConsumer::LockedBuffer*>(
- env->GetLongField(image, gSurfaceImageClassInfo.mLockedBuffer));
+ env->GetLongField(image, gSurfaceImageClassInfo.mNativeBuffer));
}
static void Image_setBuffer(JNIEnv* env, jobject thiz,
const CpuConsumer::LockedBuffer* buffer)
{
- env->SetLongField(thiz, gSurfaceImageClassInfo.mLockedBuffer, reinterpret_cast<jlong>(buffer));
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer, reinterpret_cast<jlong>(buffer));
+}
+
+static void Image_setOpaqueBuffer(JNIEnv* env, jobject thiz,
+ const BufferItem* buffer)
+{
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer, reinterpret_cast<jlong>(buffer));
}
static uint32_t Image_getJpegSize(CpuConsumer::LockedBuffer* buffer, bool usingRGBAOverride)
@@ -431,6 +489,19 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
pData = buffer->data;
dataSize = buffer->stride * buffer->height;
break;
+ case HAL_PIXEL_FORMAT_RAW12:
+ // Single plane 12bpp bayer data.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ LOG_ALWAYS_FATAL_IF(buffer->width % 4,
+ "Width is not multiple of 4 %d", buffer->width);
+ LOG_ALWAYS_FATAL_IF(buffer->height % 2,
+ "Height is not even %d", buffer->height);
+ LOG_ALWAYS_FATAL_IF(buffer->stride < (buffer->width * 12 / 8),
+ "stride (%d) should be at least %d",
+ buffer->stride, buffer->width * 12 / 8);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ break;
case HAL_PIXEL_FORMAT_RGBA_8888:
case HAL_PIXEL_FORMAT_RGBX_8888:
// Single plane, 32bpp.
@@ -492,8 +563,10 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
break;
case HAL_PIXEL_FORMAT_BLOB:
case HAL_PIXEL_FORMAT_RAW10:
- // Blob is used for JPEG data, RAW10 is used for 10-bit raw data, they are
- // single plane, row and pixel strides are 0.
+ case HAL_PIXEL_FORMAT_RAW12:
+ // Blob is used for JPEG data, RAW10 and RAW12 are used for 10-bit and 12-bit raw data,
+ // those are single plane data with pixel stride 0 since they don't really have a
+ // well defined pixel stride
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
pixelStride = 0;
break;
@@ -549,12 +622,14 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
rowStride = (idx == 0) ? buffer->stride : ALIGN(buffer->stride / 2, 16);
break;
case HAL_PIXEL_FORMAT_BLOB:
- // Blob is used for JPEG data, RAW10 is used for 10-bit raw data, they are
- // single plane, row and pixel strides are 0.
+ // Blob is used for JPEG data. It is single plane and has 0 row stride and
+ // 0 pixel stride
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
rowStride = 0;
break;
case HAL_PIXEL_FORMAT_RAW10:
+ case HAL_PIXEL_FORMAT_RAW12:
+ // RAW10 and RAW12 are used for 10-bit and 12-bit raw data, they are single plane
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
rowStride = buffer->stride;
break;
@@ -616,6 +691,52 @@ static int Image_getBufferHeight(CpuConsumer::LockedBuffer* buffer) {
return buffer->height;
}
+// --------------------------Methods for opaque Image and ImageReader----------
+
+static BufferItemConsumer* ImageReader_getOpaqueConsumer(JNIEnv* env, jobject thiz)
+{
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* const ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
+ return NULL;
+ }
+
+ if (!ctx->isOpaque()) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Non-opaque ImageReader doesn't support this method");
+ }
+
+ return ctx->getOpaqueConsumer();
+}
+
+static BufferItem* Image_getOpaqueBuffer(JNIEnv* env, jobject image)
+{
+ return reinterpret_cast<BufferItem*>(
+ env->GetLongField(image, gSurfaceImageClassInfo.mNativeBuffer));
+}
+
+static int Image_getOpaqueBufferWidth(BufferItem* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->mCrop.isEmpty()) {
+ return buffer->mCrop.getWidth();
+ }
+ return buffer->mGraphicBuffer->getWidth();
+}
+
+static int Image_getOpaqueBufferHeight(BufferItem* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->mCrop.isEmpty()) {
+ return buffer->mCrop.getHeight();
+ }
+
+ return buffer->mGraphicBuffer->getHeight();
+}
+
+
+
// ----------------------------------------------------------------------------
static void ImageReader_classInit(JNIEnv* env, jclass clazz)
@@ -625,9 +746,9 @@ static void ImageReader_classInit(JNIEnv* env, jclass clazz)
jclass imageClazz = env->FindClass("android/media/ImageReader$SurfaceImage");
LOG_ALWAYS_FATAL_IF(imageClazz == NULL,
"can't find android/graphics/ImageReader$SurfaceImage");
- gSurfaceImageClassInfo.mLockedBuffer = env->GetFieldID(
+ gSurfaceImageClassInfo.mNativeBuffer = env->GetFieldID(
imageClazz, ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID, "J");
- LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mLockedBuffer == NULL,
+ LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeBuffer == NULL,
"can't find android/graphics/ImageReader.%s",
ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID);
@@ -674,24 +795,42 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
nativeDataspace = android_view_Surface_mapPublicFormatToHalDataspace(
publicFormat);
- sp<IGraphicBufferProducer> gbProducer;
- sp<IGraphicBufferConsumer> gbConsumer;
- BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
- sp<CpuConsumer> consumer = new CpuConsumer(gbConsumer, maxImages,
- /*controlledByApp*/true);
- // TODO: throw dvm exOutOfMemoryError?
- if (consumer == NULL) {
- jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
- return;
- }
-
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
jniThrowRuntimeException(env, "Can't find android/graphics/ImageReader");
return;
}
sp<JNIImageReaderContext> ctx(new JNIImageReaderContext(env, weakThiz, clazz, maxImages));
- ctx->setCpuConsumer(consumer);
+
+ sp<IGraphicBufferProducer> gbProducer;
+ sp<IGraphicBufferConsumer> gbConsumer;
+ BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+ sp<ConsumerBase> consumer;
+ sp<CpuConsumer> cpuConsumer;
+ sp<BufferItemConsumer> opaqueConsumer;
+ if (isFormatOpaque(nativeFormat)) {
+ // Use the SW_READ_NEVER usage to tell producer that this format is not for preview or video
+ // encoding. The only possibility will be ZSL output.
+ opaqueConsumer =
+ new BufferItemConsumer(gbConsumer, GRALLOC_USAGE_SW_READ_NEVER, maxImages,
+ /*controlledByApp*/true);
+ if (opaqueConsumer == NULL) {
+ jniThrowRuntimeException(env, "Failed to allocate native opaque consumer");
+ return;
+ }
+ ctx->setOpaqueConsumer(opaqueConsumer);
+ consumer = opaqueConsumer;
+ } else {
+ cpuConsumer = new CpuConsumer(gbConsumer, maxImages, /*controlledByApp*/true);
+ // TODO: throw dvm exOutOfMemoryError?
+ if (cpuConsumer == NULL) {
+ jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
+ return;
+ }
+ ctx->setCpuConsumer(cpuConsumer);
+ consumer = cpuConsumer;
+ }
+
ctx->setProducer(gbProducer);
consumer->setFrameAvailableListener(ctx);
ImageReader_setNativeContext(env, thiz, ctx);
@@ -701,23 +840,42 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
ctx->setBufferHeight(height);
// Set the width/height/format/dataspace to the CpuConsumer
- res = consumer->setDefaultBufferSize(width, height);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer size");
- return;
- }
- res = consumer->setDefaultBufferFormat(nativeFormat);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer format");
- }
- res = consumer->setDefaultBufferDataSpace(nativeDataspace);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer dataSpace");
+ // TODO: below code can be simplified once b/19977701 is fixed.
+ if (isFormatOpaque(nativeFormat)) {
+ res = opaqueConsumer->setDefaultBufferSize(width, height);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer size");
+ return;
+ }
+ res = opaqueConsumer->setDefaultBufferFormat(nativeFormat);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer format");
+ }
+ res = opaqueConsumer->setDefaultBufferDataSpace(nativeDataspace);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer dataSpace");
+ }
+ } else {
+ res = cpuConsumer->setDefaultBufferSize(width, height);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer size");
+ return;
+ }
+ res = cpuConsumer->setDefaultBufferFormat(nativeFormat);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer format");
+ }
+ res = cpuConsumer->setDefaultBufferDataSpace(nativeDataspace);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer dataSpace");
+ }
}
-
}
static void ImageReader_close(JNIEnv* env, jobject thiz)
@@ -730,7 +888,13 @@ static void ImageReader_close(JNIEnv* env, jobject thiz)
return;
}
- CpuConsumer* consumer = ImageReader_getCpuConsumer(env, thiz);
+ ConsumerBase* consumer = NULL;
+ if (ctx->isOpaque()) {
+ consumer = ImageReader_getOpaqueConsumer(env, thiz);
+ } else {
+ consumer = ImageReader_getCpuConsumer(env, thiz);
+ }
+
if (consumer != NULL) {
consumer->abandon();
consumer->setFrameAvailableListener(NULL);
@@ -747,27 +911,66 @@ static void ImageReader_imageRelease(JNIEnv* env, jobject thiz, jobject image)
return;
}
- CpuConsumer* consumer = ctx->getCpuConsumer();
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, image);
- if (!buffer) {
- ALOGW("Image already released!!!");
- return;
+ if (ctx->isOpaque()) {
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, image);
+ opaqueConsumer->releaseBuffer(*opaqueBuffer); // Not using fence for now.
+ Image_setOpaqueBuffer(env, image, NULL);
+ ctx->returnOpaqueBuffer(opaqueBuffer);
+ ALOGV("%s: Opaque Image has been released", __FUNCTION__);
+ } else {
+ CpuConsumer* consumer = ctx->getCpuConsumer();
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, image);
+ if (!buffer) {
+ ALOGW("Image already released!!!");
+ return;
+ }
+ consumer->unlockBuffer(*buffer);
+ Image_setBuffer(env, image, NULL);
+ ctx->returnLockedBuffer(buffer);
+ ALOGV("%s: Image (format: 0x%x) has been released", __FUNCTION__, ctx->getBufferFormat());
}
- consumer->unlockBuffer(*buffer);
- Image_setBuffer(env, image, NULL);
- ctx->returnLockedBuffer(buffer);
}
-static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
- jobject image)
-{
+static jint ImageReader_opaqueImageSetup(JNIEnv* env, JNIImageReaderContext* ctx, jobject image) {
ALOGV("%s:", __FUNCTION__);
- JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
- if (ctx == NULL) {
+ if (ctx == NULL || !ctx->isOpaque()) {
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return -1;
}
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* buffer = ctx->getOpaqueBuffer();
+ if (buffer == NULL) {
+ ALOGW("Unable to acquire a buffer item, very likely client tried to acquire more than"
+ " maxImages buffers");
+ return ACQUIRE_MAX_IMAGES;
+ }
+
+ status_t res = opaqueConsumer->acquireBuffer(buffer, 0);
+ if (res != OK) {
+ ctx->returnOpaqueBuffer(buffer);
+ if (res == INVALID_OPERATION) {
+ // Max number of images were already acquired.
+ ALOGE("%s: Max number of buffers allowed are already acquired : %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return ACQUIRE_MAX_IMAGES;
+ } else {
+ ALOGE("%s: Acquire image failed with error: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return ACQUIRE_NO_BUFFERS;
+ }
+ }
+
+ // Set SurfaceImage instance member variables
+ Image_setOpaqueBuffer(env, image, buffer);
+ env->SetLongField(image, gSurfaceImageClassInfo.mTimestamp,
+ static_cast<jlong>(buffer->mTimestamp));
+
+ return ACQUIRE_SUCCESS;
+}
+
+static jint ImageReader_lockedImageSetup(JNIEnv* env, JNIImageReaderContext* ctx, jobject image) {
CpuConsumer* consumer = ctx->getCpuConsumer();
CpuConsumer::LockedBuffer* buffer = ctx->getLockedBuffer();
if (buffer == NULL) {
@@ -860,6 +1063,57 @@ static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
return ACQUIRE_SUCCESS;
}
+static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz, jobject image) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
+ return -1;
+ }
+
+ if (ctx->isOpaque()) {
+ return ImageReader_opaqueImageSetup(env, ctx, image);
+ } else {
+ return ImageReader_lockedImageSetup(env, ctx, image);
+ }
+}
+
+static jint ImageReader_detachImage(JNIEnv* env, jobject thiz, jobject image) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", "ImageReader was already closed");
+ return -1;
+ }
+
+ status_t res = OK;
+ if (!ctx->isOpaque()) {
+ // TODO: Non-Opaque format detach is not implemented yet.
+ jniThrowRuntimeException(env,
+ "nativeDetachImage is not implemented yet for non-opaque format !!!");
+ return -1;
+ }
+
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, image);
+ if (!opaqueBuffer) {
+ ALOGE(
+ "Opaque Image already released and can not be detached from ImageReader!!!");
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque Image detach from ImageReader failed: buffer was already released");
+ return -1;
+ }
+
+ res = opaqueConsumer->detachBuffer(opaqueBuffer->mSlot);
+ if (res != OK) {
+ ALOGE("Opaque Image detach failed: %s (%d)!!!", strerror(-res), res);
+ jniThrowRuntimeException(env,
+ "nativeDetachImage failed for opaque image!!!");
+ return res;
+ }
+ return OK;
+}
+
static jobject ImageReader_getSurface(JNIEnv* env, jobject thiz)
{
ALOGV("%s: ", __FUNCTION__);
@@ -878,8 +1132,15 @@ static jobject Image_createSurfacePlane(JNIEnv* env, jobject thiz, int idx, int
{
int rowStride, pixelStride;
PublicFormat publicReaderFormat = static_cast<PublicFormat>(readerFormat);
+ int halReaderFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ publicReaderFormat);
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
+ if (isFormatOpaque(halReaderFormat)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque images from Opaque ImageReader do not have any planes");
+ return NULL;
+ }
CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
@@ -888,9 +1149,6 @@ static jobject Image_createSurfacePlane(JNIEnv* env, jobject thiz, int idx, int
jniThrowException(env, "java/lang/IllegalStateException", "Image was released");
}
- int halReaderFormat = android_view_Surface_mapPublicFormatToHalFormat(
- publicReaderFormat);
-
rowStride = Image_imageGetRowStride(env, buffer, idx, halReaderFormat);
pixelStride = Image_imageGetPixelStride(env, buffer, idx, halReaderFormat);
@@ -906,18 +1164,23 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
uint32_t size = 0;
jobject byteBuffer;
PublicFormat readerPublicFormat = static_cast<PublicFormat>(readerFormat);
+ int readerHalFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ readerPublicFormat);
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
+ if (isFormatOpaque(readerHalFormat)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque images from Opaque ImageReader do not have any plane");
+ return NULL;
+ }
+
CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
if (buffer == NULL) {
jniThrowException(env, "java/lang/IllegalStateException", "Image was released");
}
- int readerHalFormat = android_view_Surface_mapPublicFormatToHalFormat(
- readerPublicFormat);
-
// Create byteBuffer from native buffer
Image_getLockedBufferInfo(env, buffer, idx, &base, &size, readerHalFormat);
@@ -937,19 +1200,28 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
return byteBuffer;
}
-static jint Image_getWidth(JNIEnv* env, jobject thiz)
+static jint Image_getWidth(JNIEnv* env, jobject thiz, jint format)
{
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
- return Image_getBufferWidth(buffer);
+ if (isFormatOpaque(format)) {
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, thiz);
+ return Image_getOpaqueBufferWidth(opaqueBuffer);
+ } else {
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferWidth(buffer);
+ }
}
-static jint Image_getHeight(JNIEnv* env, jobject thiz)
+static jint Image_getHeight(JNIEnv* env, jobject thiz, jint format)
{
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
- return Image_getBufferHeight(buffer);
+ if (isFormatOpaque(format)) {
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, thiz);
+ return Image_getOpaqueBufferHeight(opaqueBuffer);
+ } else {
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferHeight(buffer);
+ }
}
-
} // extern "C"
// ----------------------------------------------------------------------------
@@ -961,14 +1233,15 @@ static JNINativeMethod gImageReaderMethods[] = {
{"nativeReleaseImage", "(Landroid/media/Image;)V", (void*)ImageReader_imageRelease },
{"nativeImageSetup", "(Landroid/media/Image;)I", (void*)ImageReader_imageSetup },
{"nativeGetSurface", "()Landroid/view/Surface;", (void*)ImageReader_getSurface },
+ {"nativeDetachImage", "(Landroid/media/Image;)I", (void*)ImageReader_detachImage },
};
static JNINativeMethod gImageMethods[] = {
{"nativeImageGetBuffer", "(II)Ljava/nio/ByteBuffer;", (void*)Image_getByteBuffer },
{"nativeCreatePlane", "(II)Landroid/media/ImageReader$SurfaceImage$SurfacePlane;",
(void*)Image_createSurfacePlane },
- {"nativeGetWidth", "()I", (void*)Image_getWidth },
- {"nativeGetHeight", "()I", (void*)Image_getHeight },
+ {"nativeGetWidth", "(I)I", (void*)Image_getWidth },
+ {"nativeGetHeight", "(I)I", (void*)Image_getHeight },
};
int register_android_media_ImageReader(JNIEnv *env) {
diff --git a/media/jni/android_media_ImageWriter.cpp b/media/jni/android_media_ImageWriter.cpp
new file mode 100644
index 0000000..294cd84
--- /dev/null
+++ b/media/jni/android_media_ImageWriter.cpp
@@ -0,0 +1,1083 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ImageWriter_JNI"
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
+#include <gui/CpuConsumer.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_runtime/android_view_Surface.h>
+#include <camera3.h>
+
+#include <jni.h>
+#include <JNIHelp.h>
+
+#include <stdint.h>
+#include <inttypes.h>
+
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
+
+#define IMAGE_BUFFER_JNI_ID "mNativeBuffer"
+
+// ----------------------------------------------------------------------------
+
+using namespace android;
+
+enum {
+ IMAGE_WRITER_MAX_NUM_PLANES = 3,
+};
+
+static struct {
+ jmethodID postEventFromNative;
+ jfieldID mWriterFormat;
+} gImageWriterClassInfo;
+
+static struct {
+ jfieldID mNativeBuffer;
+ jfieldID mNativeFenceFd;
+ jfieldID mPlanes;
+} gSurfaceImageClassInfo;
+
+static struct {
+ jclass clazz;
+ jmethodID ctor;
+} gSurfacePlaneClassInfo;
+
+typedef CpuConsumer::LockedBuffer LockedImage;
+
+// ----------------------------------------------------------------------------
+
+/*
+ * Native context for an ImageWriter: owns the producer Surface and, as a
+ * BnProducerListener, forwards consumer buffer-release events back to the
+ * Java ImageWriter via postEventFromNative.
+ */
+class JNIImageWriterContext : public BnProducerListener {
+public:
+    JNIImageWriterContext(JNIEnv* env, jobject weakThiz, jclass clazz);
+
+    virtual ~JNIImageWriterContext();
+
+    // Implementation of IProducerListener, used to notify the ImageWriter that the consumer
+    // has returned a buffer and it is ready for ImageWriter to dequeue.
+    virtual void onBufferReleased();
+
+    void setProducer(const sp<Surface>& producer) { mProducer = producer; }
+    Surface* getProducer() { return mProducer.get(); }
+
+    void setBufferFormat(int format) { mFormat = format; }
+    int getBufferFormat() { return mFormat; }
+
+    void setBufferWidth(int width) { mWidth = width; }
+    int getBufferWidth() { return mWidth; }
+
+    void setBufferHeight(int height) { mHeight = height; }
+    int getBufferHeight() { return mHeight; }
+
+private:
+    // Attach/detach helpers for calling back into Java from binder threads.
+    static JNIEnv* getJNIEnv(bool* needsDetach);
+    static void detachJNI();
+
+    sp<Surface> mProducer;  // producer side of the app-supplied Surface
+    jobject mWeakThiz;      // global ref to the Java-side weak reference object
+    jclass mClazz;          // global ref to the ImageWriter class
+    int mFormat;            // cached producer HAL pixel format
+    int mWidth;             // cached producer width, -1 until queried
+    int mHeight;            // cached producer height, -1 until queried
+};
+
+// Pins global refs to the Java weak reference and the ImageWriter class;
+// dimensions start at -1 (unknown) until ImageWriter_init queries the producer.
+JNIImageWriterContext::JNIImageWriterContext(JNIEnv* env, jobject weakThiz, jclass clazz) :
+    mWeakThiz(env->NewGlobalRef(weakThiz)),
+    mClazz((jclass)env->NewGlobalRef(clazz)),
+    mFormat(0),
+    mWidth(-1),
+    mHeight(-1) {
+}
+
+// Releases the global refs (attaching the current thread to the VM if needed)
+// and drops the producer. If no JNIEnv can be obtained the refs are
+// deliberately leaked rather than crashing.
+JNIImageWriterContext::~JNIImageWriterContext() {
+    ALOGV("%s", __FUNCTION__);
+    bool needsDetach = false;
+    JNIEnv* env = getJNIEnv(&needsDetach);
+    if (env != NULL) {
+        env->DeleteGlobalRef(mWeakThiz);
+        env->DeleteGlobalRef(mClazz);
+    } else {
+        ALOGW("leaking JNI object references");
+    }
+    if (needsDetach) {
+        detachJNI();
+    }
+
+    mProducer.clear();
+}
+
+// Returns a JNIEnv for the current thread, attaching the thread to the VM if
+// it is not already a Java thread. Sets *needsDetach when the caller must
+// later call detachJNI(); returns NULL if attaching fails.
+JNIEnv* JNIImageWriterContext::getJNIEnv(bool* needsDetach) {
+    ALOGV("%s", __FUNCTION__);
+    LOG_ALWAYS_FATAL_IF(needsDetach == NULL, "needsDetach is null!!!");
+    *needsDetach = false;
+    JNIEnv* env = AndroidRuntime::getJNIEnv();
+    if (env == NULL) {
+        // Not a Java thread (e.g. a binder callback thread); attach temporarily.
+        JavaVMAttachArgs args = {JNI_VERSION_1_4, NULL, NULL};
+        JavaVM* vm = AndroidRuntime::getJavaVM();
+        int result = vm->AttachCurrentThread(&env, (void*) &args);
+        if (result != JNI_OK) {
+            ALOGE("thread attach failed: %#x", result);
+            return NULL;
+        }
+        *needsDetach = true;
+    }
+    return env;
+}
+
+// Detaches the current thread from the VM; pairs with a getJNIEnv() call that
+// reported needsDetach == true.
+void JNIImageWriterContext::detachJNI() {
+    ALOGV("%s", __FUNCTION__);
+    JavaVM* vm = AndroidRuntime::getJavaVM();
+    int result = vm->DetachCurrentThread();
+    if (result != JNI_OK) {
+        ALOGE("thread detach failed: %#x", result);
+    }
+}
+
+/*
+ * IProducerListener callback: the consumer has released a buffer back to the
+ * queue. For opaque (IMPLEMENTATION_DEFINED) writers the freed buffer is
+ * detached immediately (see b/19977520); then the Java side is notified so a
+ * blocked dequeueInputImage can proceed.
+ */
+void JNIImageWriterContext::onBufferReleased() {
+    ALOGV("%s: buffer released", __FUNCTION__);
+    bool needsDetach = false;
+    JNIEnv* env = getJNIEnv(&needsDetach);
+    if (env != NULL) {
+        // Detach the buffer every time when a buffer consumption is done,
+        // need let this callback give a BufferItem, then only detach if it was attached to this
+        // Writer. Do the detach unconditionally for opaque format now. see b/19977520
+        if (mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            sp<Fence> fence;
+            sp<GraphicBuffer> buffer;
+            ALOGV("%s: One buffer is detached", __FUNCTION__);
+            mProducer->detachNextBuffer(&buffer, &fence);
+        }
+
+        env->CallStaticVoidMethod(mClazz, gImageWriterClassInfo.postEventFromNative, mWeakThiz);
+    } else {
+        // BUG FIX: log message was ungrammatical ("will not posted").
+        ALOGW("onBufferReleased event will not be posted");
+    }
+
+    if (needsDetach) {
+        detachJNI();
+    }
+}
+
+// ----------------------------------------------------------------------------
+
+extern "C" {
+
+// -------------------------------Private method declarations--------------
+
+static bool isPossiblyYUV(PixelFormat format);
+static void Image_setNativeContext(JNIEnv* env, jobject thiz,
+ sp<GraphicBuffer> buffer, int fenceFd);
+static void Image_getNativeContext(JNIEnv* env, jobject thiz,
+ GraphicBuffer** buffer, int* fenceFd);
+static void Image_unlockIfLocked(JNIEnv* env, jobject thiz);
+static bool isFormatOpaque(int format);
+
+// --------------------------ImageWriter methods---------------------------------------
+
+/*
+ * Cache JNI field/method IDs used by ImageWriter and WriterSurfaceImage.
+ * Called once from the Java static initializer; any missing member is fatal.
+ */
+static void ImageWriter_classInit(JNIEnv* env, jclass clazz) {
+    ALOGV("%s:", __FUNCTION__);
+    jclass imageClazz = env->FindClass("android/media/ImageWriter$WriterSurfaceImage");
+    LOG_ALWAYS_FATAL_IF(imageClazz == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage");
+    gSurfaceImageClassInfo.mNativeBuffer = env->GetFieldID(
+            imageClazz, IMAGE_BUFFER_JNI_ID, "J");
+    LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeBuffer == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage.%s", IMAGE_BUFFER_JNI_ID);
+
+    gSurfaceImageClassInfo.mNativeFenceFd = env->GetFieldID(
+            imageClazz, "mNativeFenceFd", "I");
+    LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeFenceFd == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage.mNativeFenceFd");
+
+    gSurfaceImageClassInfo.mPlanes = env->GetFieldID(
+            imageClazz, "mPlanes", "[Landroid/media/ImageWriter$WriterSurfaceImage$SurfacePlane;");
+    LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mPlanes == NULL,
+            "can't find android/media/ImageWriter$WriterSurfaceImage.mPlanes");
+
+    gImageWriterClassInfo.postEventFromNative = env->GetStaticMethodID(
+            clazz, "postEventFromNative", "(Ljava/lang/Object;)V");
+    LOG_ALWAYS_FATAL_IF(gImageWriterClassInfo.postEventFromNative == NULL,
+            "can't find android/media/ImageWriter.postEventFromNative");
+
+    gImageWriterClassInfo.mWriterFormat = env->GetFieldID(
+            clazz, "mWriterFormat", "I");
+    LOG_ALWAYS_FATAL_IF(gImageWriterClassInfo.mWriterFormat == NULL,
+            "can't find android/media/ImageWriter.mWriterFormat");
+
+    jclass planeClazz = env->FindClass("android/media/ImageWriter$WriterSurfaceImage$SurfacePlane");
+    LOG_ALWAYS_FATAL_IF(planeClazz == NULL, "Can not find SurfacePlane class");
+    // FindClass only gives a local reference of jclass object.
+    gSurfacePlaneClassInfo.clazz = (jclass) env->NewGlobalRef(planeClazz);
+    gSurfacePlaneClassInfo.ctor = env->GetMethodID(gSurfacePlaneClassInfo.clazz, "<init>",
+            "(Landroid/media/ImageWriter$WriterSurfaceImage;IILjava/nio/ByteBuffer;)V");
+    LOG_ALWAYS_FATAL_IF(gSurfacePlaneClassInfo.ctor == NULL,
+            "Can not find SurfacePlane constructor");
+}
+
+/*
+ * Create the native ImageWriter context bound to the given Java Surface.
+ *
+ * Connects a producer-side Surface (NATIVE_WINDOW_API_CAMERA) with a listener
+ * that posts buffer-release events back to Java, queries the producer's
+ * width/height/format into the context, and sizes the buffer queue to
+ * maxImages + the consumer's min-undequeued quota.
+ *
+ * Returns the strong-ref'd context pointer as a jlong, or 0 on failure (with
+ * a Java exception pending).
+ */
+static jlong ImageWriter_init(JNIEnv* env, jobject thiz, jobject weakThiz, jobject jsurface,
+        jint maxImages) {
+    status_t res;
+
+    ALOGV("%s: maxImages:%d", __FUNCTION__, maxImages);
+
+    sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+    if (surface == NULL) {
+        jniThrowException(env,
+                "java/lang/IllegalArgumentException",
+                "The surface has been released");
+        return 0;
+    }
+    sp<IGraphicBufferProducer> bufferProducer = surface->getIGraphicBufferProducer();
+
+    jclass clazz = env->GetObjectClass(thiz);
+    if (clazz == NULL) {
+        jniThrowRuntimeException(env, "Can't find android/graphics/ImageWriter");
+        return 0;
+    }
+    sp<JNIImageWriterContext> ctx(new JNIImageWriterContext(env, weakThiz, clazz));
+
+    sp<Surface> producer = new Surface(bufferProducer, /*controlledByApp*/false);
+    ctx->setProducer(producer);
+    /**
+     * NATIVE_WINDOW_API_CPU isn't a good choice here, as it makes the bufferQueue not connectable
+     * after disconnect. MEDIA or CAMERA are treated the same internally. The producer listener
+     * will be cleared after disconnect call.
+     */
+    producer->connect(/*api*/NATIVE_WINDOW_API_CAMERA, /*listener*/ctx);
+    jlong nativeCtx = reinterpret_cast<jlong>(ctx.get());
+
+    // Get the dimension and format of the producer.
+    // NOTE(review): the error paths below return without disconnecting the
+    // producer connected above — confirm whether a disconnect is intended.
+    sp<ANativeWindow> anw = producer;
+    int32_t width, height, format;
+    if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
+        ALOGE("%s: Query Surface width failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Failed to query Surface width");
+        return 0;
+    }
+    ctx->setBufferWidth(width);
+
+    if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+        ALOGE("%s: Query Surface height failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Failed to query Surface height");
+        return 0;
+    }
+    ctx->setBufferHeight(height);
+
+    if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        ALOGE("%s: Query Surface format failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Failed to query Surface format");
+        return 0;
+    }
+    ctx->setBufferFormat(format);
+    // BUG FIX: this is a plain int -> jint value conversion, not a bit
+    // reinterpretation; the original's reinterpret_cast<jint>(format) misused
+    // reinterpret_cast between integral types.
+    env->SetIntField(thiz, gImageWriterClassInfo.mWriterFormat, static_cast<jint>(format));
+
+    // Opaque formats are never CPU-accessible, so only request SW write usage
+    // for formats the app can lock and fill.
+    if (!isFormatOpaque(format)) {
+        res = native_window_set_usage(anw.get(), GRALLOC_USAGE_SW_WRITE_OFTEN);
+        if (res != OK) {
+            ALOGE("%s: Configure usage %08x for format %08x failed: %s (%d)",
+                    __FUNCTION__, GRALLOC_USAGE_SW_WRITE_OFTEN, format, strerror(-res), res);
+            jniThrowRuntimeException(env, "Failed to SW_WRITE_OFTEN configure usage");
+            return 0;
+        }
+    }
+
+    int minUndequeuedBufferCount = 0;
+    res = anw->query(anw.get(),
+            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufferCount);
+    if (res != OK) {
+        ALOGE("%s: Query producer undequeued buffer count failed: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Query producer undequeued buffer count failed");
+        return 0;
+    }
+
+    // The app may hold maxImages dequeued at once; the consumer additionally
+    // needs its min-undequeued quota, so the queue must hold the sum.
+    size_t totalBufferCount = maxImages + minUndequeuedBufferCount;
+    res = native_window_set_buffer_count(anw.get(), totalBufferCount);
+    if (res != OK) {
+        ALOGE("%s: Set buffer count failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "Set buffer count failed");
+        return 0;
+    }
+
+    // Hand a strong reference to the Java side; released in ImageWriter_close().
+    if (ctx != 0) {
+        ctx->incStrong((void*)ImageWriter_init);
+    }
+    return nativeCtx;
+}
+
+/*
+ * Dequeue the next producer buffer from the Surface and attach it to the
+ * given Java WriterSurfaceImage together with its acquire fence. Throws
+ * IllegalStateException/RuntimeException on failure.
+ */
+static void ImageWriter_dequeueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    sp<ANativeWindow> anw = ctx->getProducer();
+    android_native_buffer_t *anb = NULL;
+    int fenceFd = -1;
+    status_t res = anw->dequeueBuffer(anw.get(), &anb, &fenceFd);
+    if (res != OK) {
+        // TODO: handle different error cases here.
+        ALOGE("%s: Dequeue buffer failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+        jniThrowRuntimeException(env, "dequeue buffer failed");
+        return;
+    }
+    // New GraphicBuffer object doesn't own the handle, thus the native buffer
+    // won't be freed when this object is destroyed.
+    sp<GraphicBuffer> buffer(new GraphicBuffer(anb, /*keepOwnership*/false));
+
+    // Note that:
+    // 1. No need to lock buffer now, will only lock it when the first getPlanes() is called.
+    // 2. Fence will be saved to mNativeFenceFd, and will be consumed by lock/queue/cancel buffer
+    //    later.
+    // 3. Need to use lockAsync here, as it will handle the dequeued fence for us automatically.
+
+    // Finally, set the native info into image object.
+    Image_setNativeContext(env, image, buffer, fenceFd);
+}
+
+/*
+ * Tear down the writer: disconnect the producer (DEAD_OBJECT is tolerated —
+ * the window died with its owning process) and drop the strong reference
+ * taken in ImageWriter_init.
+ */
+static void ImageWriter_close(JNIEnv* env, jobject thiz, jlong nativeCtx) {
+    ALOGV("%s:", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    ANativeWindow* producer = ctx->getProducer();
+    if (producer != NULL) {
+        /**
+         * NATIVE_WINDOW_API_CPU isn't a good choice here, as it makes the bufferQueue not
+         * connectable after disconnect. MEDIA or CAMERA are treated the same internally.
+         * The producer listener will be cleared after disconnect call.
+         */
+        status_t res = native_window_api_disconnect(producer, /*api*/NATIVE_WINDOW_API_CAMERA);
+        /**
+         * This is not an error. if client calling process dies, the window will
+         * also die and all calls to it will return DEAD_OBJECT, thus it's already
+         * "disconnected"
+         */
+        if (res == DEAD_OBJECT) {
+            ALOGW("%s: While disconnecting ImageWriter from native window, the"
+                    " native window died already", __FUNCTION__);
+        } else if (res != OK) {
+            ALOGE("%s: native window disconnect failed: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            jniThrowRuntimeException(env, "Native window disconnect failed");
+            return;
+        }
+    }
+
+    // Balances the incStrong in ImageWriter_init; may destroy the context.
+    ctx->decStrong((void*)ImageWriter_init);
+}
+
+/*
+ * Return a dequeued image to the queue without presenting it: unlock it if
+ * the app had locked planes, cancel the buffer (handing back the fence fd),
+ * and clear the image's native state.
+ */
+static void ImageWriter_cancelImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    sp<ANativeWindow> anw = ctx->getProducer();
+
+    GraphicBuffer *buffer = NULL;
+    int fenceFd = -1;
+    Image_getNativeContext(env, image, &buffer, &fenceFd);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    // Unlock the image if it was locked
+    Image_unlockIfLocked(env, image);
+
+    anw->cancelBuffer(anw.get(), buffer, fenceFd);
+
+    Image_setNativeContext(env, image, NULL, -1);
+}
+
+/*
+ * Queue a dequeued image to the consumer with the given timestamp (ns) and
+ * crop rect, unlocking it first if the app had locked planes. On success the
+ * image's native context is cleared — its public lifecycle ends here.
+ */
+static void ImageWriter_queueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image,
+        jlong timestampNs, jint left, jint top, jint right, jint bottom) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return;
+    }
+
+    status_t res = OK;
+    sp<ANativeWindow> anw = ctx->getProducer();
+
+    GraphicBuffer *buffer = NULL;
+    int fenceFd = -1;
+    Image_getNativeContext(env, image, &buffer, &fenceFd);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    // Unlock image if it was locked.
+    Image_unlockIfLocked(env, image);
+
+    // Set timestamp
+    ALOGV("timestamp to be queued: %" PRId64, timestampNs);
+    res = native_window_set_buffers_timestamp(anw.get(), timestampNs);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set timestamp failed");
+        return;
+    }
+
+    // Set crop
+    android_native_rect_t cropRect;
+    cropRect.left = left;
+    cropRect.top = top;
+    cropRect.right = right;
+    cropRect.bottom = bottom;
+    res = native_window_set_crop(anw.get(), &cropRect);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set crop rect failed");
+        return;
+    }
+
+    // Finally, queue input buffer; queueBuffer consumes the fence fd.
+    res = anw->queueBuffer(anw.get(), buffer, fenceFd);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Queue input buffer failed");
+        return;
+    }
+
+    // Clear the image native context: end of this image's lifecycle in public API.
+    Image_setNativeContext(env, image, NULL, -1);
+}
+
+/*
+ * Attach an image that originated from an (opaque) ImageReader to this
+ * writer's queue and immediately queue it with the given timestamp and crop.
+ * Only opaque-to-opaque transfers are supported for now (b/19962027 tracks
+ * the non-opaque path). Returns OK (0) on success, a status_t error code on
+ * failure (with a Java exception pending).
+ */
+static jint ImageWriter_attachAndQueueImage(JNIEnv* env, jobject thiz, jlong nativeCtx,
+        jlong nativeBuffer, jint imageFormat, jlong timestampNs, jint left, jint top,
+        jint right, jint bottom) {
+    ALOGV("%s", __FUNCTION__);
+    JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+    if (ctx == NULL || thiz == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "ImageWriterContext is not initialized");
+        return -1;
+    }
+
+    sp<Surface> surface = ctx->getProducer();
+    status_t res = OK;
+    if (!isFormatOpaque(imageFormat)) {
+        // TODO: need implement, see b/19962027
+        jniThrowRuntimeException(env,
+                "nativeAttachImage for non-opaque image is not implement yet!!!");
+        return -1;
+    }
+
+    if (!isFormatOpaque(ctx->getBufferFormat())) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Trying to attach an opaque image into a non-opaque ImageWriter");
+        return -1;
+    }
+
+    // Image is guaranteed to be from ImageReader at this point, so it is safe to
+    // cast to BufferItem pointer.
+    BufferItem* opaqueBuffer = reinterpret_cast<BufferItem*>(nativeBuffer);
+    if (opaqueBuffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized or already closed");
+        return -1;
+    }
+
+    // Step 1. Attach Image
+    res = surface->attachBuffer(opaqueBuffer->mGraphicBuffer.get());
+    if (res != OK) {
+        // TODO: handle different error case separately.
+        ALOGE("Attach image failed: %s (%d)", strerror(-res), res);
+        jniThrowRuntimeException(env, "nativeAttachImage failed!!!");
+        return res;
+    }
+    sp < ANativeWindow > anw = surface;
+
+    // Step 2. Set timestamp and crop. Note that we do not need unlock the image because
+    // it was not locked.
+    ALOGV("timestamp to be queued: %" PRId64, timestampNs);
+    res = native_window_set_buffers_timestamp(anw.get(), timestampNs);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set timestamp failed");
+        return res;
+    }
+
+    android_native_rect_t cropRect;
+    cropRect.left = left;
+    cropRect.top = top;
+    cropRect.right = right;
+    cropRect.bottom = bottom;
+    res = native_window_set_crop(anw.get(), &cropRect);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Set crop rect failed");
+        return res;
+    }
+
+    // Step 3. Queue Image. No fence: attachBuffer took the buffer as-is.
+    res = anw->queueBuffer(anw.get(), opaqueBuffer->mGraphicBuffer.get(), /*fenceFd*/
+            -1);
+    if (res != OK) {
+        jniThrowRuntimeException(env, "Queue input buffer failed");
+        return res;
+    }
+
+    // Do not set the image native context. Since it would overwrite the existing native context
+    // of the image that is from ImageReader, the subsequent image close will run into issues.
+
+    return res;
+}
+
+// --------------------------Image methods---------------------------------------
+
+/*
+ * Read the native GraphicBuffer pointer and fence fd stored on the Java
+ * WriterSurfaceImage. Either out-parameter may be NULL to skip it.
+ */
+static void Image_getNativeContext(JNIEnv* env, jobject thiz,
+        GraphicBuffer** buffer, int* fenceFd) {
+    ALOGV("%s", __FUNCTION__);
+    if (buffer != NULL) {
+        GraphicBuffer *gb = reinterpret_cast<GraphicBuffer *>
+                (env->GetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer));
+        *buffer = gb;
+    }
+
+    if (fenceFd != NULL) {
+        // BUG FIX: GetIntField already yields a jint; the original wrapped it
+        // in an integral-to-integral reinterpret_cast, which is not what
+        // reinterpret_cast is for. A plain assignment is correct.
+        *fenceFd = env->GetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd);
+    }
+}
+
+/*
+ * Store (buffer, fenceFd) on the Java WriterSurfaceImage, taking a strong
+ * reference on the new buffer and dropping the one held on any previous
+ * buffer. Pass NULL / -1 to clear the image's native state.
+ */
+static void Image_setNativeContext(JNIEnv* env, jobject thiz,
+        sp<GraphicBuffer> buffer, int fenceFd) {
+    ALOGV("%s:", __FUNCTION__);
+    GraphicBuffer* p = NULL;
+    Image_getNativeContext(env, thiz, &p, /*fenceFd*/NULL);
+    // Ref-count order (inc new, then dec old) stays safe even if both are the
+    // same buffer.
+    if (buffer != 0) {
+        buffer->incStrong((void*)Image_setNativeContext);
+    }
+    if (p) {
+        p->decStrong((void*)Image_setNativeContext);
+    }
+    env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer,
+            reinterpret_cast<jlong>(buffer.get()));
+
+    // BUG FIX: static_cast for the int -> jint value conversion; the original
+    // misused reinterpret_cast between integral types.
+    env->SetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd, static_cast<jint>(fenceFd));
+}
+
+/*
+ * Unlock the image's GraphicBuffer if a previous getPlanes() locked it.
+ * Lock state is inferred from mPlanes being non-null (it is only ever set
+ * for non-opaque formats). Throws IllegalStateException if the image has no
+ * native buffer.
+ */
+static void Image_unlockIfLocked(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    Image_getNativeContext(env, thiz, &buffer, NULL);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    // Is locked?
+    bool isLocked = false;
+    jobject planes = NULL;
+    if (!isFormatOpaque(buffer->getPixelFormat())) {
+        planes = env->GetObjectField(thiz, gSurfaceImageClassInfo.mPlanes);
+    }
+    isLocked = (planes != NULL);
+    if (isLocked) {
+        // no need to use fence here, as it will be consumed by either cancel or queue buffer.
+        status_t res = buffer->unlock();
+        if (res != OK) {
+            jniThrowRuntimeException(env, "unlock buffer failed");
+        }
+        ALOGV("Successfully unlocked the image");
+    }
+}
+
+// Width in pixels of the image's backing GraphicBuffer; throws and returns -1
+// when the image has no native buffer.
+static jint Image_getWidth(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* gb = NULL;
+    Image_getNativeContext(env, thiz, &gb, NULL);
+    if (gb != NULL) {
+        return gb->getWidth();
+    }
+    jniThrowException(env, "java/lang/IllegalStateException",
+            "Image is not initialized");
+    return -1;
+}
+
+// Height in pixels of the image's backing GraphicBuffer; throws and returns
+// -1 when the image has no native buffer.
+static jint Image_getHeight(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* gb = NULL;
+    Image_getNativeContext(env, thiz, &gb, NULL);
+    if (gb != NULL) {
+        return gb->getHeight();
+    }
+    jniThrowException(env, "java/lang/IllegalStateException",
+            "Image is not initialized");
+    return -1;
+}
+
+// Some formats like JPEG defined with different values between android.graphics.ImageFormat and
+// graphics.h, need convert to the one defined in graphics.h here.
+// Some formats like JPEG defined with different values between android.graphics.ImageFormat and
+// graphics.h, need convert to the one defined in graphics.h here.
+// NOTE(review): FindClass/GetStaticFieldID run on every call — consider
+// caching the JPEG constant during class init if this is hot.
+static int Image_getPixelFormat(JNIEnv* env, int format) {
+    int jpegFormat;
+    jfieldID fid;
+
+    ALOGV("%s: format = 0x%x", __FUNCTION__, format);
+
+    jclass imageFormatClazz = env->FindClass("android/graphics/ImageFormat");
+    ALOG_ASSERT(imageFormatClazz != NULL);
+
+    fid = env->GetStaticFieldID(imageFormatClazz, "JPEG", "I");
+    jpegFormat = env->GetStaticIntField(imageFormatClazz, fid);
+
+    // Translate the JPEG to BLOB for camera purpose.
+    if (format == jpegFormat) {
+        format = HAL_PIXEL_FORMAT_BLOB;
+    }
+
+    return format;
+}
+
+// Pixel format of the image's backing buffer, translated to the graphics.h
+// value (JPEG -> BLOB); throws and returns 0 when uninitialized.
+static jint Image_getFormat(JNIEnv* env, jobject thiz) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* gb = NULL;
+    Image_getNativeContext(env, thiz, &gb, NULL);
+    if (gb != NULL) {
+        return Image_getPixelFormat(env, gb->getPixelFormat());
+    }
+    jniThrowException(env, "java/lang/IllegalStateException",
+            "Image is not initialized");
+    return 0;
+}
+
+// Store a new fence fd on the Java image (e.g. -1 once a lock consumed it).
+static void Image_setFenceFd(JNIEnv* env, jobject thiz, int fenceFd) {
+    ALOGV("%s:", __FUNCTION__);
+    // BUG FIX: static_cast, not reinterpret_cast, for an integral conversion.
+    env->SetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd, static_cast<jint>(fenceFd));
+}
+
+/*
+ * Lock the image's GraphicBuffer for CPU write and fill *image with the
+ * plane layout. YUV-capable formats are tried with lockAsyncYCbCr first and
+ * reported as flexible YCbCr_420_888; everything else uses a flat lockAsync
+ * mapping. Throws (and returns) on failure.
+ */
+static void Image_getLockedImage(JNIEnv* env, jobject thiz, LockedImage *image) {
+    ALOGV("%s", __FUNCTION__);
+    GraphicBuffer* buffer;
+    int fenceFd = -1;
+    Image_getNativeContext(env, thiz, &buffer, &fenceFd);
+    if (buffer == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Image is not initialized");
+        return;
+    }
+
+    void* pData = NULL;
+    android_ycbcr ycbcr = android_ycbcr();
+    status_t res;
+    int format = Image_getFormat(env, thiz);
+    int flexFormat = format;
+    if (isPossiblyYUV(format)) {
+        // ImageWriter doesn't use crop by itself, app sets it, use the no crop version.
+        res = buffer->lockAsyncYCbCr(GRALLOC_USAGE_SW_WRITE_OFTEN, &ycbcr, fenceFd);
+        // Clear the fenceFd as it is already consumed by lock call.
+        Image_setFenceFd(env, thiz, /*fenceFd*/-1);
+        if (res != OK) {
+            jniThrowRuntimeException(env, "lockAsyncYCbCr failed for YUV buffer");
+            return;
+        }
+        pData = ycbcr.y;
+        flexFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
+    }
+
+    // Non-YUV path: flat CPU mapping of the whole buffer.
+    if (pData == NULL) {
+        res = buffer->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &pData, fenceFd);
+        // BUG FIX: lockAsync consumed fenceFd just like the YCbCr path above,
+        // so the stored fd must be cleared here too; otherwise a stale fd is
+        // later handed to queue/cancel buffer.
+        Image_setFenceFd(env, thiz, /*fenceFd*/-1);
+        if (res != OK) {
+            jniThrowRuntimeException(env, "lockAsync failed");
+            return;
+        }
+    }
+
+    image->data = reinterpret_cast<uint8_t*>(pData);
+    image->width = buffer->getWidth();
+    image->height = buffer->getHeight();
+    image->format = format;
+    image->flexFormat = flexFormat;
+    image->stride = (ycbcr.y != NULL) ? static_cast<uint32_t>(ycbcr.ystride) : buffer->getStride();
+
+    image->dataCb = reinterpret_cast<uint8_t*>(ycbcr.cb);
+    image->dataCr = reinterpret_cast<uint8_t*>(ycbcr.cr);
+    image->chromaStride = static_cast<uint32_t>(ycbcr.cstride);
+    image->chromaStep = static_cast<uint32_t>(ycbcr.chroma_step);
+    ALOGV("Successfully locked the image");
+    // crop, transform, scalingMode, timestamp, and frameNumber should be set by producer,
+    // and we don't set them here.
+}
+
+// True when a BLOB (JPEG) writer is being fed RGBA_8888 gralloc buffers —
+// the RGBA-to-JPEG transport workaround (b/17379185).
+static bool usingRGBAToJpegOverride(int32_t bufferFormat, int32_t writerCtxFormat) {
+    if (writerCtxFormat != HAL_PIXEL_FORMAT_BLOB) {
+        return false;
+    }
+    return bufferFormat == HAL_PIXEL_FORMAT_RGBA_8888;
+}
+
+// Report RGBA buffers as BLOB when the writer is in the RGBA-to-JPEG
+// override mode; otherwise pass the buffer format through untouched.
+static int32_t applyFormatOverrides(int32_t bufferFormat, int32_t writerCtxFormat)
+{
+    // Using HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers containing JPEGs to get around SW
+    // write limitations for some platforms (b/17379185).
+    return usingRGBAToJpegOverride(bufferFormat, writerCtxFormat)
+            ? HAL_PIXEL_FORMAT_BLOB : bufferFormat;
+}
+
+/*
+ * Extract the actual JPEG payload size from a BLOB buffer by looking for the
+ * camera3_jpeg_blob transport header at the very end of the buffer. Falls
+ * back to the full buffer width (BLOBs are 1-D: width == byte size) when no
+ * header is found.
+ */
+static uint32_t Image_getJpegSize(LockedImage* buffer, bool usingRGBAOverride) {
+    ALOGV("%s", __FUNCTION__);
+    ALOG_ASSERT(buffer != NULL, "Input buffer is NULL!!!");
+    uint32_t size = 0;
+    uint32_t width = buffer->width;
+    uint8_t* jpegBuffer = buffer->data;
+
+    if (usingRGBAOverride) {
+        // RGBA transport: 4 bytes per pixel across the strided rows.
+        // NOTE(review): assumes the last row is `width` pixels while earlier
+        // rows span `stride` — confirm against the reader-side equivalent.
+        width = (buffer->width + buffer->stride * (buffer->height - 1)) * 4;
+    }
+
+    // First check for JPEG transport header at the end of the buffer
+    uint8_t* header = jpegBuffer + (width - sizeof(struct camera3_jpeg_blob));
+    struct camera3_jpeg_blob *blob = (struct camera3_jpeg_blob*)(header);
+    if (blob->jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
+        size = blob->jpeg_size;
+        ALOGV("%s: Jpeg size = %d", __FUNCTION__, size);
+    }
+
+    // failed to find size, default to whole buffer
+    if (size == 0) {
+        /*
+         * This is a problem because not including the JPEG header
+         * means that in certain rare situations a regular JPEG blob
+         * will be misidentified as having a header, in which case
+         * we will get a garbage size value.
+         */
+        ALOGW("%s: No JPEG header detected, defaulting to size=width=%d",
+                __FUNCTION__, width);
+        size = width;
+    }
+
+    return size;
+}
+
+/*
+ * Compute the plane layout for plane `idx` of a locked image: base pointer,
+ * byte size, pixel stride and row stride, switching on the (override-applied)
+ * flexible HAL format. Throws UnsupportedOperationException for unknown
+ * formats.
+ */
+static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
+        int32_t writerFormat, uint8_t **base, uint32_t *size, int *pixelStride, int *rowStride) {
+    ALOGV("%s", __FUNCTION__);
+    ALOG_ASSERT(buffer != NULL, "Input buffer is NULL!!!");
+    ALOG_ASSERT(base != NULL, "base is NULL!!!");
+    ALOG_ASSERT(size != NULL, "size is NULL!!!");
+    ALOG_ASSERT(pixelStride != NULL, "pixelStride is NULL!!!");
+    ALOG_ASSERT(rowStride != NULL, "rowStride is NULL!!!");
+    ALOG_ASSERT((idx < IMAGE_WRITER_MAX_NUM_PLANES) && (idx >= 0));
+
+    ALOGV("%s: buffer: %p", __FUNCTION__, buffer);
+
+    uint32_t dataSize, ySize, cSize, cStride;
+    uint32_t pStride = 0, rStride = 0;
+    uint8_t *cb, *cr;
+    uint8_t *pData = NULL;
+    int bytesPerPixel = 0;
+
+    dataSize = ySize = cSize = cStride = 0;
+    int32_t fmt = buffer->flexFormat;
+
+    bool usingRGBAOverride = usingRGBAToJpegOverride(fmt, writerFormat);
+    fmt = applyFormatOverrides(fmt, writerFormat);
+    switch (fmt) {
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+            pData =
+                (idx == 0) ?
+                    buffer->data :
+                (idx == 1) ?
+                    buffer->dataCb :
+                buffer->dataCr;
+            // only map until last pixel
+            if (idx == 0) {
+                pStride = 1;
+                rStride = buffer->stride;
+                dataSize = buffer->stride * (buffer->height - 1) + buffer->width;
+            } else {
+                pStride = buffer->chromaStep;
+                rStride = buffer->chromaStride;
+                dataSize = buffer->chromaStride * (buffer->height / 2 - 1) +
+                        buffer->chromaStep * (buffer->width / 2 - 1) + 1;
+            }
+            break;
+        // NV21
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+            cr = buffer->data + (buffer->stride * buffer->height);
+            cb = cr + 1;
+            // only map until last pixel
+            ySize = buffer->width * (buffer->height - 1) + buffer->width;
+            cSize = buffer->width * (buffer->height / 2 - 1) + buffer->width - 1;
+
+            pData =
+                (idx == 0) ?
+                    buffer->data :
+                (idx == 1) ?
+                    cb:
+                cr;
+
+            dataSize = (idx == 0) ? ySize : cSize;
+            pStride = (idx == 0) ? 1 : 2;
+            rStride = buffer->width;
+            break;
+        case HAL_PIXEL_FORMAT_YV12:
+            // Y and C stride need to be 16 pixel aligned.
+            LOG_ALWAYS_FATAL_IF(buffer->stride % 16,
+                    "Stride is not 16 pixel aligned %d", buffer->stride);
+
+            ySize = buffer->stride * buffer->height;
+            cStride = ALIGN(buffer->stride / 2, 16);
+            cr = buffer->data + ySize;
+            cSize = cStride * buffer->height / 2;
+            cb = cr + cSize;
+
+            pData =
+                (idx == 0) ?
+                    buffer->data :
+                (idx == 1) ?
+                    cb :
+                cr;
+            dataSize = (idx == 0) ? ySize : cSize;
+            pStride = 1;
+            rStride = (idx == 0) ? buffer->stride : ALIGN(buffer->stride / 2, 16);
+            break;
+        case HAL_PIXEL_FORMAT_Y8:
+            // Single plane, 8bpp.
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height;
+            pStride = 1;
+            rStride = buffer->stride;
+            break;
+        case HAL_PIXEL_FORMAT_Y16:
+            bytesPerPixel = 2;
+            // Single plane, 16bpp, strides are specified in pixels, not in bytes
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height * bytesPerPixel;
+            pStride = bytesPerPixel;
+            rStride = buffer->stride * 2;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            // Used for JPEG data, height must be 1, width == size, single plane.
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+            ALOG_ASSERT(buffer->height == 1, "JPEG should has height value %d", buffer->height);
+
+            pData = buffer->data;
+            dataSize = Image_getJpegSize(buffer, usingRGBAOverride);
+            pStride = bytesPerPixel;
+            // BUG FIX: the original wrote `rowStride = 0;`, which NULLed the
+            // out-parameter pointer itself and made the `*rowStride = rStride`
+            // store at the end of this function a NULL dereference. The local
+            // row-stride value is what must be zeroed (BLOB data is 1-D).
+            rStride = 0;
+            break;
+        case HAL_PIXEL_FORMAT_RAW16:
+            // Single plane 16bpp bayer data.
+            bytesPerPixel = 2;
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height * bytesPerPixel;
+            pStride = bytesPerPixel;
+            rStride = buffer->stride * 2;
+            break;
+        case HAL_PIXEL_FORMAT_RAW10:
+            // Single plane 10bpp bayer data.
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+            LOG_ALWAYS_FATAL_IF(buffer->width % 4,
+                    "Width is not multiple of 4 %d", buffer->width);
+            LOG_ALWAYS_FATAL_IF(buffer->height % 2,
+                    "Height is not even %d", buffer->height);
+            LOG_ALWAYS_FATAL_IF(buffer->stride < (buffer->width * 10 / 8),
+                    "stride (%d) should be at least %d",
+                    buffer->stride, buffer->width * 10 / 8);
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height;
+            pStride = 0;
+            rStride = buffer->stride;
+            break;
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_RGBX_8888:
+            // Single plane, 32bpp.
+            bytesPerPixel = 4;
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height * bytesPerPixel;
+            pStride = bytesPerPixel;
+            rStride = buffer->stride * 4;
+            break;
+        case HAL_PIXEL_FORMAT_RGB_565:
+            // Single plane, 16bpp.
+            bytesPerPixel = 2;
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height * bytesPerPixel;
+            pStride = bytesPerPixel;
+            rStride = buffer->stride * 2;
+            break;
+        case HAL_PIXEL_FORMAT_RGB_888:
+            // Single plane, 24bpp.
+            bytesPerPixel = 3;
+            ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+            pData = buffer->data;
+            dataSize = buffer->stride * buffer->height * bytesPerPixel;
+            pStride = bytesPerPixel;
+            rStride = buffer->stride * 3;
+            break;
+        default:
+            jniThrowExceptionFmt(env, "java/lang/UnsupportedOperationException",
+                    "Pixel format: 0x%x is unsupported", fmt);
+            break;
+    }
+
+    *base = pData;
+    *size = dataSize;
+    *pixelStride = pStride;
+    *rowStride = rStride;
+}
+
+// Builds the Java SurfacePlane[] for a writable image: locks the underlying
+// buffer and wraps each plane's pixel data in a direct ByteBuffer.
+// Returns NULL with a Java exception pending on failure.
+static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
+ int numPlanes, int writerFormat) {
+ ALOGV("%s: create SurfacePlane array with size %d", __FUNCTION__, numPlanes);
+ int rowStride, pixelStride;
+ uint8_t *pData;
+ uint32_t dataSize;
+ jobject byteBuffer;
+
+ int format = Image_getFormat(env, thiz);
+ // Opaque formats have no CPU-accessible planes, so the only legal plane
+ // count for them is 0.
+ if (isFormatOpaque(format) && numPlanes > 0) {
+ String8 msg;
+ msg.appendFormat("Format 0x%x is opaque, thus not writable, the number of planes (%d)"
+ " must be 0", format, numPlanes);
+ jniThrowException(env, "java/lang/IllegalArgumentException", msg.string());
+ return NULL;
+ }
+
+ jobjectArray surfacePlanes = env->NewObjectArray(numPlanes, gSurfacePlaneClassInfo.clazz,
+ /*initial_element*/NULL);
+ if (surfacePlanes == NULL) {
+ jniThrowRuntimeException(env, "Failed to create SurfacePlane arrays,"
+ " probably out of memory");
+ return NULL;
+ }
+ // For an opaque image the (empty) array is all that is needed.
+ if (isFormatOpaque(format)) {
+ return surfacePlanes;
+ }
+
+ // Buildup buffer info: rowStride, pixelStride and byteBuffers.
+ LockedImage lockedImg = LockedImage();
+ Image_getLockedImage(env, thiz, &lockedImg);
+
+ // Create all SurfacePlanes
+ writerFormat = Image_getPixelFormat(env, writerFormat);
+ for (int i = 0; i < numPlanes; i++) {
+ Image_getLockedImageInfo(env, &lockedImg, i, writerFormat,
+ &pData, &dataSize, &pixelStride, &rowStride);
+ byteBuffer = env->NewDirectByteBuffer(pData, dataSize);
+ // NewDirectByteBuffer may return NULL without raising an exception
+ // (e.g. direct buffers unsupported); surface that case explicitly.
+ if ((byteBuffer == NULL) && (env->ExceptionCheck() == false)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to allocate ByteBuffer");
+ return NULL;
+ }
+
+ // Finally, create this SurfacePlane.
+ jobject surfacePlane = env->NewObject(gSurfacePlaneClassInfo.clazz,
+ gSurfacePlaneClassInfo.ctor, thiz, rowStride, pixelStride, byteBuffer);
+ env->SetObjectArrayElement(surfacePlanes, i, surfacePlane);
+ }
+
+ return surfacePlanes;
+}
+
+// -------------------------------Private convenience methods--------------------
+
+// Returns true when |format| carries no CPU-accessible pixel data.
+// IMPLEMENTATION_DEFINED is the only format treated as opaque for now.
+static bool isFormatOpaque(int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ return true;
+ default:
+ return false;
+ }
+}
+
+// Conservatively decides whether |format| may carry YUV data. Formats known
+// to be RGB, raw, blob, or opaque report false; every other format --
+// including anything unrecognized -- reports true so callers treat it as
+// potentially YUV.
+static bool isPossiblyYUV(PixelFormat format) {
+ switch (static_cast<int>(format)) {
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ case HAL_PIXEL_FORMAT_RGB_888:
+ case HAL_PIXEL_FORMAT_RGB_565:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ case HAL_PIXEL_FORMAT_Y8:
+ case HAL_PIXEL_FORMAT_Y16:
+ case HAL_PIXEL_FORMAT_RAW16:
+ case HAL_PIXEL_FORMAT_RAW10:
+ case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+ case HAL_PIXEL_FORMAT_BLOB:
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ return false;
+ default:
+ // YV12, YCbCr_420_888, YCbCr_422_SP, YCrCb_420_SP,
+ // YCbCr_422_I and all unknown formats land here.
+ return true;
+ }
+}
+
+} // extern "C"
+
+// ----------------------------------------------------------------------------
+
+// JNI method table for android.media.ImageWriter. Signatures must match the
+// native method declarations on the Java side exactly.
+static JNINativeMethod gImageWriterMethods[] = {
+ {"nativeClassInit", "()V", (void*)ImageWriter_classInit },
+ {"nativeInit", "(Ljava/lang/Object;Landroid/view/Surface;I)J",
+ (void*)ImageWriter_init },
+ {"nativeClose", "(J)V", (void*)ImageWriter_close },
+ {"nativeAttachAndQueueImage", "(JJIJIIII)I", (void*)ImageWriter_attachAndQueueImage },
+ {"nativeDequeueInputImage", "(JLandroid/media/Image;)V", (void*)ImageWriter_dequeueImage },
+ {"nativeQueueInputImage", "(JLandroid/media/Image;JIIII)V", (void*)ImageWriter_queueImage },
+ // NOTE(review): unlike its siblings this name has no "native" prefix --
+ // confirm it matches the Java-side declaration.
+ {"cancelImage", "(JLandroid/media/Image;)V", (void*)ImageWriter_cancelImage },
+};
+
+// JNI method table for ImageWriter$WriterSurfaceImage (the writable Image
+// subclass backed by the Surface buffer).
+static JNINativeMethod gImageMethods[] = {
+ {"nativeCreatePlanes", "(II)[Landroid/media/ImageWriter$WriterSurfaceImage$SurfacePlane;",
+ (void*)Image_createSurfacePlanes },
+ {"nativeGetWidth", "()I", (void*)Image_getWidth },
+ {"nativeGetHeight", "()I", (void*)Image_getHeight },
+ {"nativeGetFormat", "()I", (void*)Image_getFormat },
+};
+
+// Registers the native methods for ImageWriter and its WriterSurfaceImage
+// inner class. Returns nonzero if either registration fails.
+int register_android_media_ImageWriter(JNIEnv *env) {
+
+ const int writerStatus = AndroidRuntime::registerNativeMethods(env,
+ "android/media/ImageWriter", gImageWriterMethods, NELEM(gImageWriterMethods));
+
+ const int imageStatus = AndroidRuntime::registerNativeMethods(env,
+ "android/media/ImageWriter$WriterSurfaceImage", gImageMethods, NELEM(gImageMethods));
+
+ return (writerStatus || imageStatus);
+}
+
diff --git a/media/jni/android_media_MediaCodec.cpp b/media/jni/android_media_MediaCodec.cpp
index 16758d0..5f586a9 100644
--- a/media/jni/android_media_MediaCodec.cpp
+++ b/media/jni/android_media_MediaCodec.cpp
@@ -70,6 +70,11 @@ static struct CodecActionCodes {
jint codecActionRecoverable;
} gCodecActionCodes;
+static struct ExceptionReason {
+ jint reasonHardware;
+ jint reasonReclaimed;
+} gExceptionReason;
+
struct fields_t {
jfieldID context;
jmethodID postEventFromNativeID;
@@ -568,7 +573,7 @@ static jthrowable createCodecException(
env, env->FindClass("android/media/MediaCodec$CodecException"));
CHECK(clazz.get() != NULL);
- const jmethodID ctor = env->GetMethodID(clazz.get(), "<init>", "(IILjava/lang/String;)V");
+ const jmethodID ctor = env->GetMethodID(clazz.get(), "<init>", "(IILjava/lang/String;I)V");
CHECK(ctor != NULL);
ScopedLocalRef<jstring> msgObj(
@@ -587,7 +592,9 @@ static jthrowable createCodecException(
break;
}
- return (jthrowable)env->NewObject(clazz.get(), ctor, err, actionCode, msgObj.get());
+ // TODO: propagate reason from MediaCodec.
+ int reason = gExceptionReason.reasonHardware;
+ return (jthrowable)env->NewObject(clazz.get(), ctor, err, actionCode, msgObj.get(), reason);
}
void JMediaCodec::handleCallback(const sp<AMessage> &msg) {
@@ -1454,6 +1461,16 @@ static void android_media_MediaCodec_native_init(JNIEnv *env) {
CHECK(field != NULL);
gCodecActionCodes.codecActionRecoverable =
env->GetStaticIntField(clazz.get(), field);
+
+ field = env->GetStaticFieldID(clazz.get(), "REASON_HARDWARE", "I");
+ CHECK(field != NULL);
+ gExceptionReason.reasonHardware =
+ env->GetStaticIntField(clazz.get(), field);
+
+ field = env->GetStaticFieldID(clazz.get(), "REASON_RECLAIMED", "I");
+ CHECK(field != NULL);
+ gExceptionReason.reasonReclaimed =
+ env->GetStaticIntField(clazz.get(), field);
}
static void android_media_MediaCodec_native_setup(
diff --git a/media/jni/android_media_MediaCodecList.cpp b/media/jni/android_media_MediaCodecList.cpp
index f8c349b..82dd48d 100644
--- a/media/jni/android_media_MediaCodecList.cpp
+++ b/media/jni/android_media_MediaCodecList.cpp
@@ -262,6 +262,27 @@ static jobject android_media_MediaCodecList_getCodecCapabilities(
return caps;
}
+// Returns the codec-list global settings converted to a Java Map, or NULL
+// with an exception pending on failure.
+static jobject android_media_MediaCodecList_getGlobalSettings(JNIEnv *env, jobject /* thiz */) {
+ sp<IMediaCodecList> mcl = getCodecList(env);
+ if (mcl == NULL) {
+ // Runtime exception already pending.
+ return NULL;
+ }
+
+ const sp<AMessage> settings = mcl->getGlobalSettings();
+ if (settings == NULL) {
+ jniThrowException(env, "java/lang/RuntimeException", "cannot get global settings");
+ return NULL;
+ }
+
+ jobject settingsObj = NULL;
+ // ConvertMessageToMap returns nonzero on failure; presumably it leaves a
+ // Java exception pending in that case -- TODO confirm.
+ if (ConvertMessageToMap(env, settings, &settingsObj)) {
+ return NULL;
+ }
+
+ return settingsObj;
+}
+
static void android_media_MediaCodecList_native_init(JNIEnv* /* env */) {
}
@@ -277,6 +298,10 @@ static JNINativeMethod gMethods[] = {
"(ILjava/lang/String;)Landroid/media/MediaCodecInfo$CodecCapabilities;",
(void *)android_media_MediaCodecList_getCodecCapabilities },
+ { "native_getGlobalSettings",
+ "()Ljava/util/Map;",
+ (void *)android_media_MediaCodecList_getGlobalSettings },
+
{ "findCodecByName", "(Ljava/lang/String;)I",
(void *)android_media_MediaCodecList_findCodecByName },
diff --git a/media/jni/android_media_MediaCrypto.cpp b/media/jni/android_media_MediaCrypto.cpp
index d2216fb..a9accb0 100644
--- a/media/jni/android_media_MediaCrypto.cpp
+++ b/media/jni/android_media_MediaCrypto.cpp
@@ -140,6 +140,15 @@ sp<ICrypto> JCrypto::GetCrypto(JNIEnv *env, jobject obj) {
return jcrypto->mCrypto;
}
+// JNI conversion utilities
+// Copies the contents of |byteArray| into a freshly built Vector<uint8_t>.
+static Vector<uint8_t> JByteArrayToVector(JNIEnv *env, jbyteArray const &byteArray) {
+ Vector<uint8_t> vector;
+ size_t length = env->GetArrayLength(byteArray);
+ // insertAt(index, count) grows the vector to |length| entries before the
+ // region copy below fills them in.
+ vector.insertAt((size_t)0, length);
+ env->GetByteArrayRegion(byteArray, 0, length, (jbyte *)vector.editArray());
+ return vector;
+}
+
} // namespace android
using namespace android;
@@ -274,6 +283,37 @@ static jboolean android_media_MediaCrypto_requiresSecureDecoderComponent(
return result ? JNI_TRUE : JNI_FALSE;
}
+// Associates this MediaCrypto with an existing MediaDrm session.
+// Throws IllegalArgumentException for a null session id or a missing native
+// crypto object, and MediaCryptoException when the plugin reports an error.
+static void android_media_MediaCrypto_setMediaDrmSession(
+ JNIEnv *env, jobject thiz, jbyteArray jsessionId) {
+ if (jsessionId == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ sp<ICrypto> crypto = JCrypto::GetCrypto(env, thiz);
+
+ if (crypto == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ Vector<uint8_t> sessionId(JByteArrayToVector(env, jsessionId));
+
+ status_t err = crypto->setMediaDrmSession(sessionId);
+
+ // BUG FIX: the exception used to be thrown unconditionally, so even a
+ // successful call raised MediaCryptoException. Only throw on error.
+ if (err != OK) {
+ String8 msg("setMediaDrmSession failed");
+ if (err == ERROR_DRM_SESSION_NOT_OPENED) {
+ msg += ": session not opened";
+ } else if (err == ERROR_UNSUPPORTED) {
+ msg += ": not supported by this crypto scheme";
+ } else if (err == NO_INIT) {
+ msg += ": crypto plugin not initialized";
+ } else {
+ msg.appendFormat(": general failure (%d)", err);
+ }
+ jniThrowException(env, "android/media/MediaCryptoException", msg.string());
+ }
+}
+
static JNINativeMethod gMethods[] = {
{ "release", "()V", (void *)android_media_MediaCrypto_release },
{ "native_init", "()V", (void *)android_media_MediaCrypto_native_init },
@@ -289,6 +329,9 @@ static JNINativeMethod gMethods[] = {
{ "requiresSecureDecoderComponent", "(Ljava/lang/String;)Z",
(void *)android_media_MediaCrypto_requiresSecureDecoderComponent },
+
+ { "setMediaDrmSession", "([B)V",
+ (void *)android_media_MediaCrypto_setMediaDrmSession },
};
int register_android_media_Crypto(JNIEnv *env) {
diff --git a/media/jni/android_media_MediaDataSource.cpp b/media/jni/android_media_MediaDataSource.cpp
new file mode 100644
index 0000000..1e6d2af
--- /dev/null
+++ b/media/jni/android_media_MediaDataSource.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "JMediaDataSource-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaDataSource.h"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/Log.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/ScopedLocalRef.h>
+
+namespace android {
+
+// Caches global refs to the Java MediaDataSource and its callback methods,
+// plus a reusable byte[] and a shared-memory region used to hand read data
+// to remote callers.
+JMediaDataSource::JMediaDataSource(JNIEnv* env, jobject source)
+ : mJavaObjStatus(OK), mSizeIsCached(false), mCachedSize(0), mMemory(NULL) {
+ mMediaDataSourceObj = env->NewGlobalRef(source);
+ CHECK(mMediaDataSourceObj != NULL);
+
+ ScopedLocalRef<jclass> mediaDataSourceClass(env, env->GetObjectClass(mMediaDataSourceObj));
+ CHECK(mediaDataSourceClass.get() != NULL);
+
+ mReadMethod = env->GetMethodID(mediaDataSourceClass.get(), "readAt", "(J[BI)I");
+ CHECK(mReadMethod != NULL);
+ mGetSizeMethod = env->GetMethodID(mediaDataSourceClass.get(), "getSize", "()J");
+ CHECK(mGetSizeMethod != NULL);
+ mCloseMethod = env->GetMethodID(mediaDataSourceClass.get(), "close", "()V");
+ CHECK(mCloseMethod != NULL);
+
+ // Reusable buffer handed to the Java readAt(); sized to the largest
+ // single read we serve.
+ ScopedLocalRef<jbyteArray> tmp(env, env->NewByteArray(kBufferSize));
+ mByteArrayObj = (jbyteArray)env->NewGlobalRef(tmp.get());
+ CHECK(mByteArrayObj != NULL);
+
+ // NOTE(review): the dealer sp is local; this assumes the returned
+ // allocation keeps its heap alive after the dealer goes out of scope --
+ // confirm against MemoryDealer semantics. A NULL mMemory leaves the
+ // object permanently failing readAt().
+ sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "JMediaDataSource");
+ mMemory = memoryDealer->allocate(kBufferSize);
+ if (mMemory == NULL) {
+ ALOGE("Failed to allocate memory!");
+ }
+}
+
+// Releases the global refs. Runs on whatever thread drops the last strong
+// reference, hence fetching the JNIEnv from the runtime.
+JMediaDataSource::~JMediaDataSource() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->DeleteGlobalRef(mMediaDataSourceObj);
+ env->DeleteGlobalRef(mByteArrayObj);
+}
+
+// Returns the shared memory region that readAt() fills. May be NULL if the
+// allocation failed in the constructor.
+sp<IMemory> JMediaDataSource::getIMemory() {
+ Mutex::Autolock lock(mLock);
+ return mMemory;
+}
+
+// Reads up to |size| bytes (capped at kBufferSize) at |offset| from the Java
+// source into mMemory. Returns the number of bytes read, or -1 on any
+// error -- after which the object stays in the error state for all future
+// calls (mJavaObjStatus is never reset).
+ssize_t JMediaDataSource::readAt(off64_t offset, size_t size) {
+ Mutex::Autolock lock(mLock);
+
+ if (mJavaObjStatus != OK || mMemory == NULL) {
+ return -1;
+ }
+ if (size > kBufferSize) {
+ size = kBufferSize;
+ }
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jint numread = env->CallIntMethod(mMediaDataSourceObj, mReadMethod,
+ (jlong)offset, mByteArrayObj, (jint)size);
+ if (env->ExceptionCheck()) {
+ ALOGW("An exception occurred in readAt()");
+ LOGW_EX(env);
+ env->ExceptionClear();
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return -1;
+ }
+ if (numread < 0) {
+ ALOGW("An error occurred in readAt()");
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return -1;
+ }
+ // Guard against a misbehaving app claiming more bytes than requested;
+ // copying that many would overrun mMemory below.
+ if ((size_t)numread > size) {
+ ALOGE("readAt read too many bytes.");
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return -1;
+ }
+
+ ALOGV("readAt %lld / %zu => %d.", (long long)offset, size, numread);
+ env->GetByteArrayRegion(mByteArrayObj, 0, numread, (jbyte*)mMemory->pointer());
+ return numread;
+}
+
+// Reports the source length through |size| (-1 when unknown). The Java
+// getSize() is consulted only once and the result cached, so the app cannot
+// change the size afterwards. Returns OK, or UNKNOWN_ERROR when the object
+// is broken or the Java call throws.
+status_t JMediaDataSource::getSize(off64_t* size) {
+ Mutex::Autolock lock(mLock);
+
+ if (mJavaObjStatus != OK) {
+ return UNKNOWN_ERROR;
+ }
+ if (mSizeIsCached) {
+ // BUG FIX: this used to be "return mCachedSize;", which returned the
+ // cached byte length as the status code and never wrote |*size|.
+ *size = mCachedSize;
+ return OK;
+ }
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ *size = env->CallLongMethod(mMediaDataSourceObj, mGetSizeMethod);
+ if (env->ExceptionCheck()) {
+ ALOGW("An exception occurred in getSize()");
+ LOGW_EX(env);
+ env->ExceptionClear();
+ // After returning an error, size shouldn't be used by callers.
+ *size = UNKNOWN_ERROR;
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return UNKNOWN_ERROR;
+ }
+
+ // The minimum size should be -1, which indicates unknown size.
+ if (*size < 0) {
+ *size = -1;
+ }
+
+ mCachedSize = *size;
+ mSizeIsCached = true;
+ return OK;
+}
+
+// Forwards close() to the Java source and permanently fails this object so
+// no further reads are attempted.
+void JMediaDataSource::close() {
+ Mutex::Autolock lock(mLock);
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(mMediaDataSourceObj, mCloseMethod);
+ // The closed state is effectively the same as an error state.
+ mJavaObjStatus = UNKNOWN_ERROR;
+}
+
+} // namespace android
diff --git a/media/jni/android_media_MediaDataSource.h b/media/jni/android_media_MediaDataSource.h
new file mode 100644
index 0000000..2bc237e
--- /dev/null
+++ b/media/jni/android_media_MediaDataSource.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIADATASOURCE_H_
+#define _ANDROID_MEDIA_MEDIADATASOURCE_H_
+
+#include "jni.h"
+
+#include <media/IDataSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+// The native counterpart to a Java android.media.MediaDataSource. It inherits from
+// IDataSource so that it can be accessed remotely.
+//
+// If the java DataSource returns an error or throws an exception it
+// will be considered to be in a broken state, and the only further call this
+// will make is to close().
+class JMediaDataSource : public BnDataSource {
+public:
+ enum {
+ // Size of the shared transfer buffer; single reads are capped at this
+ // many bytes.
+ kBufferSize = 64 * 1024,
+ };
+
+ JMediaDataSource(JNIEnv *env, jobject source);
+ virtual ~JMediaDataSource();
+
+ // IDataSource interface. readAt() deposits bytes into the region
+ // returned by getIMemory(); getSize() reports -1 for unknown length.
+ virtual sp<IMemory> getIMemory();
+ virtual ssize_t readAt(off64_t offset, size_t size);
+ virtual status_t getSize(off64_t* size);
+ virtual void close();
+
+private:
+ // Protect all member variables with mLock because this object will be
+ // accessed on different binder worker threads.
+ Mutex mLock;
+
+ // The status of the java DataSource. Set to OK unless an error occurred or
+ // close() was called.
+ status_t mJavaObjStatus;
+ // Only call the java getSize() once so the app can't change the size on us.
+ bool mSizeIsCached;
+ off64_t mCachedSize;
+ // Shared memory region that readAt() results are copied into.
+ sp<IMemory> mMemory;
+
+ // Global refs to the Java MediaDataSource object and its methods.
+ jobject mMediaDataSourceObj;
+ jmethodID mReadMethod;
+ jmethodID mGetSizeMethod;
+ jmethodID mCloseMethod;
+ // Reusable byte[] passed to the Java readAt().
+ jbyteArray mByteArrayObj;
+
+ DISALLOW_EVIL_CONSTRUCTORS(JMediaDataSource);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIADATASOURCE_H_
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index 8302a34..f8146a7 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -59,6 +59,7 @@ namespace android {
struct RequestFields {
jfieldID data;
jfieldID defaultUrl;
+ jfieldID requestType;
};
struct ArrayListFields {
@@ -95,12 +96,24 @@ struct EventTypes {
jint kEventSessionReclaimed;
} gEventTypes;
+struct EventWhat {
+ jint kWhatDrmEvent;
+ jint kWhatExpirationUpdate;
+ jint kWhatKeysChange;
+} gEventWhat;
+
struct KeyTypes {
jint kKeyTypeStreaming;
jint kKeyTypeOffline;
jint kKeyTypeRelease;
} gKeyTypes;
+struct KeyRequestTypes {
+ jint kKeyRequestTypeInitial;
+ jint kKeyRequestTypeRenewal;
+ jint kKeyRequestTypeRelease;
+} gKeyRequestTypes;
+
struct CertificateTypes {
jint kCertificateTypeNone;
jint kCertificateTypeX509;
@@ -179,25 +192,37 @@ JNIDrmListener::~JNIDrmListener()
void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
const Parcel *obj)
{
- jint jeventType;
+ jint jwhat;
+ jint jeventType = 0;
// translate DrmPlugin event types into their java equivalents
- switch(eventType) {
+ switch (eventType) {
case DrmPlugin::kDrmPluginEventProvisionRequired:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventProvisionRequired;
break;
case DrmPlugin::kDrmPluginEventKeyNeeded:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventKeyRequired;
break;
case DrmPlugin::kDrmPluginEventKeyExpired:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventKeyExpired;
break;
case DrmPlugin::kDrmPluginEventVendorDefined:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventVendorDefined;
break;
case DrmPlugin::kDrmPluginEventSessionReclaimed:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventSessionReclaimed;
break;
+ case DrmPlugin::kDrmPluginEventExpirationUpdate:
+ jwhat = gEventWhat.kWhatExpirationUpdate;
+ break;
+ case DrmPlugin::kDrmPluginEventKeysChange:
+ jwhat = gEventWhat.kWhatKeysChange;
+ break;
default:
ALOGE("Invalid event DrmPlugin::EventType %d, ignored", (int)eventType);
return;
@@ -210,7 +235,7 @@ void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
Parcel* nativeParcel = parcelForJavaObject(env, jParcel);
nativeParcel->setData(obj->data(), obj->dataSize());
env->CallStaticVoidMethod(mClass, gFields.post_event, mObject,
- jeventType, extra, jParcel);
+ jwhat, jeventType, extra, jParcel);
env->DeleteLocalRef(jParcel);
}
}
@@ -236,7 +261,7 @@ static bool throwExceptionAsNecessary(
const char *drmMessage = NULL;
- switch(err) {
+ switch (err) {
case ERROR_DRM_UNKNOWN:
drmMessage = "General DRM error";
break;
@@ -566,7 +591,7 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm");
GET_FIELD_ID(gFields.context, clazz, "mNativeContext", "J");
GET_STATIC_METHOD_ID(gFields.post_event, clazz, "postEventFromNative",
- "(Ljava/lang/Object;IILjava/lang/Object;)V");
+ "(Ljava/lang/Object;IIILjava/lang/Object;)V");
jfieldID field;
GET_STATIC_FIELD_ID(field, clazz, "EVENT_PROVISION_REQUIRED", "I");
@@ -580,6 +605,13 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
GET_STATIC_FIELD_ID(field, clazz, "EVENT_SESSION_RECLAIMED", "I");
gEventTypes.kEventSessionReclaimed = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "DRM_EVENT", "I");
+ gEventWhat.kWhatDrmEvent = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "EXPIRATION_UPDATE", "I");
+ gEventWhat.kWhatExpirationUpdate = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "KEYS_CHANGE", "I");
+ gEventWhat.kWhatKeysChange = env->GetStaticIntField(clazz, field);
+
GET_STATIC_FIELD_ID(field, clazz, "KEY_TYPE_STREAMING", "I");
gKeyTypes.kKeyTypeStreaming = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "KEY_TYPE_OFFLINE", "I");
@@ -587,6 +619,13 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
GET_STATIC_FIELD_ID(field, clazz, "KEY_TYPE_RELEASE", "I");
gKeyTypes.kKeyTypeRelease = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_INITIAL", "I");
+ gKeyRequestTypes.kKeyRequestTypeInitial = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_RENEWAL", "I");
+ gKeyRequestTypes.kKeyRequestTypeRenewal = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_RELEASE", "I");
+ gKeyRequestTypes.kKeyRequestTypeRelease = env->GetStaticIntField(clazz, field);
+
GET_STATIC_FIELD_ID(field, clazz, "CERTIFICATE_TYPE_NONE", "I");
gCertificateTypes.kCertificateTypeNone = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "CERTIFICATE_TYPE_X509", "I");
@@ -595,6 +634,7 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm$KeyRequest");
GET_FIELD_ID(gFields.keyRequest.data, clazz, "mData", "[B");
GET_FIELD_ID(gFields.keyRequest.defaultUrl, clazz, "mDefaultUrl", "Ljava/lang/String;");
+ GET_FIELD_ID(gFields.keyRequest.requestType, clazz, "mRequestType", "I");
FIND_CLASS(clazz, "android/media/MediaDrm$ProvisionRequest");
GET_FIELD_ID(gFields.provisionRequest.data, clazz, "mData", "[B");
@@ -786,9 +826,10 @@ static jobject android_media_MediaDrm_getKeyRequest(
Vector<uint8_t> request;
String8 defaultUrl;
+ DrmPlugin::KeyRequestType keyRequestType;
status_t err = drm->getKeyRequest(sessionId, initData, mimeType,
- keyType, optParams, request, defaultUrl);
+ keyType, optParams, request, defaultUrl, &keyRequestType);
if (throwExceptionAsNecessary(env, err, "Failed to get key request")) {
return NULL;
@@ -807,6 +848,25 @@ static jobject android_media_MediaDrm_getKeyRequest(
jstring jdefaultUrl = env->NewStringUTF(defaultUrl.string());
env->SetObjectField(keyObj, gFields.keyRequest.defaultUrl, jdefaultUrl);
+
+ switch (keyRequestType) {
+ case DrmPlugin::kKeyRequestType_Initial:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeInitial);
+ break;
+ case DrmPlugin::kKeyRequestType_Renewal:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeRenewal);
+ break;
+ case DrmPlugin::kKeyRequestType_Release:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeRelease);
+ break;
+ default:
+ throwStateException(env, "DRM plugin failure: unknown key request type",
+ ERROR_DRM_UNKNOWN);
+ break;
+ }
}
return keyObj;
diff --git a/media/jni/android_media_MediaExtractor.cpp b/media/jni/android_media_MediaExtractor.cpp
index c0795b6..b6b7a80 100644
--- a/media/jni/android_media_MediaExtractor.cpp
+++ b/media/jni/android_media_MediaExtractor.cpp
@@ -25,6 +25,7 @@
#include "android_runtime/Log.h"
#include "jni.h"
#include "JNIHelp.h"
+#include "android_media_MediaDataSource.h"
#include <media/IMediaHTTPService.h>
#include <media/hardware/CryptoAPI.h>
@@ -50,74 +51,6 @@ struct fields_t {
static fields_t gFields;
-class JavaDataSourceBridge : public DataSource {
- jmethodID mReadMethod;
- jmethodID mGetSizeMethod;
- jmethodID mCloseMethod;
- jobject mDataSource;
- public:
- JavaDataSourceBridge(JNIEnv *env, jobject source) {
- mDataSource = env->NewGlobalRef(source);
-
- jclass datasourceclass = env->GetObjectClass(mDataSource);
- CHECK(datasourceclass != NULL);
-
- mReadMethod = env->GetMethodID(datasourceclass, "readAt", "(J[BI)I");
- CHECK(mReadMethod != NULL);
-
- mGetSizeMethod = env->GetMethodID(datasourceclass, "getSize", "()J");
- CHECK(mGetSizeMethod != NULL);
-
- mCloseMethod = env->GetMethodID(datasourceclass, "close", "()V");
- CHECK(mCloseMethod != NULL);
- }
-
- ~JavaDataSourceBridge() {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
- env->CallVoidMethod(mDataSource, mCloseMethod);
- env->DeleteGlobalRef(mDataSource);
- }
-
- virtual status_t initCheck() const {
- return OK;
- }
-
- virtual ssize_t readAt(off64_t offset, void* buffer, size_t size) {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
-
- // XXX could optimize this by reusing the same array
- jbyteArray byteArrayObj = env->NewByteArray(size);
- env->DeleteLocalRef(env->GetObjectClass(mDataSource));
- env->DeleteLocalRef(env->GetObjectClass(byteArrayObj));
- ssize_t numread = env->CallIntMethod(mDataSource, mReadMethod, offset, byteArrayObj, (jint)size);
- env->GetByteArrayRegion(byteArrayObj, 0, size, (jbyte*) buffer);
- env->DeleteLocalRef(byteArrayObj);
- if (env->ExceptionCheck()) {
- ALOGW("Exception occurred while reading %zu at %lld", size, (long long)offset);
- LOGW_EX(env);
- env->ExceptionClear();
- return -1;
- }
- return numread;
- }
-
- virtual status_t getSize(off64_t *size) {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
-
- CHECK(size != NULL);
-
- int64_t len = env->CallLongMethod(mDataSource, mGetSizeMethod);
- if (len < 0) {
- *size = ERROR_UNSUPPORTED;
- } else {
- *size = len;
- }
- return OK;
- }
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
JMediaExtractor::JMediaExtractor(JNIEnv *env, jobject thiz)
: mClass(NULL),
mObject(NULL) {
@@ -777,7 +710,8 @@ static void android_media_MediaExtractor_setDataSourceCallback(
return;
}
- sp<JavaDataSourceBridge> bridge = new JavaDataSourceBridge(env, callbackObj);
+ sp<DataSource> bridge =
+ DataSource::CreateFromIDataSource(new JMediaDataSource(env, callbackObj));
status_t err = extractor->setDataSource(bridge);
if (err != OK) {
@@ -881,7 +815,7 @@ static JNINativeMethod gMethods[] = {
{ "setDataSource", "(Ljava/io/FileDescriptor;JJ)V",
(void *)android_media_MediaExtractor_setDataSourceFd },
- { "setDataSource", "(Landroid/media/DataSource;)V",
+ { "setDataSource", "(Landroid/media/MediaDataSource;)V",
(void *)android_media_MediaExtractor_setDataSourceCallback },
{ "getCachedDuration", "()J",
diff --git a/media/jni/android_media_MediaHTTPConnection.cpp b/media/jni/android_media_MediaHTTPConnection.cpp
index 7226ef5..393003d 100644
--- a/media/jni/android_media_MediaHTTPConnection.cpp
+++ b/media/jni/android_media_MediaHTTPConnection.cpp
@@ -134,7 +134,6 @@ static jobject android_media_MediaHTTPConnection_native_getIMemory(
static jint android_media_MediaHTTPConnection_native_readAt(
JNIEnv *env, jobject thiz, jlong offset, jint size) {
sp<JMediaHTTPConnection> conn = getObject(env, thiz);
-
if (size > JMediaHTTPConnection::kBufferSize) {
size = JMediaHTTPConnection::kBufferSize;
}
diff --git a/media/jni/android_media_MediaMetadataRetriever.cpp b/media/jni/android_media_MediaMetadataRetriever.cpp
index 2f6bbf4..88a6771 100644
--- a/media/jni/android_media_MediaMetadataRetriever.cpp
+++ b/media/jni/android_media_MediaMetadataRetriever.cpp
@@ -30,6 +30,7 @@
#include "jni.h"
#include "JNIHelp.h"
#include "android_runtime/AndroidRuntime.h"
+#include "android_media_MediaDataSource.h"
#include "android_media_Utils.h"
#include "android_util_Binder.h"
@@ -171,6 +172,23 @@ static void android_media_MediaMetadataRetriever_setDataSourceFD(JNIEnv *env, jo
process_media_retriever_call(env, retriever->setDataSource(fd, offset, length), "java/lang/RuntimeException", "setDataSource failed");
}
+// Binds a Java MediaDataSource to the retriever through a JMediaDataSource
+// bridge. Throws IllegalStateException when no native retriever exists and
+// IllegalArgumentException for a null source.
+static void android_media_MediaMetadataRetriever_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject dataSource)
+{
+ ALOGV("setDataSourceCallback");
+ MediaMetadataRetriever* retriever = getRetriever(env, thiz);
+ if (retriever == 0) {
+ jniThrowException(env, "java/lang/IllegalStateException", "No retriever available");
+ return;
+ }
+ if (dataSource == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ // process_media_retriever_call maps a non-OK status to RuntimeException.
+ sp<IDataSource> callbackDataSource = new JMediaDataSource(env, dataSource);
+ process_media_retriever_call(env, retriever->setDataSource(callbackDataSource), "java/lang/RuntimeException", "setDataSourceCallback failed");
+}
+
template<typename T>
static void rotate0(T* dst, const T* src, size_t width, size_t height)
{
@@ -456,6 +474,7 @@ static JNINativeMethod nativeMethods[] = {
},
{"setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaMetadataRetriever_setDataSourceFD},
+ {"_setDataSource", "(Landroid/media/MediaDataSource;)V", (void *)android_media_MediaMetadataRetriever_setDataSourceCallback},
{"_getFrameAtTime", "(JI)Landroid/graphics/Bitmap;", (void *)android_media_MediaMetadataRetriever_getFrameAtTime},
{"extractMetadata", "(I)Ljava/lang/String;", (void *)android_media_MediaMetadataRetriever_extractMetadata},
{"getEmbeddedPicture", "(I)[B", (void *)android_media_MediaMetadataRetriever_getEmbeddedPicture},
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 55643f7..2c61779 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -20,6 +20,7 @@
#include "utils/Log.h"
#include <media/mediaplayer.h>
+#include <media/AudioResamplerPublic.h>
#include <media/IMediaHTTPService.h>
#include <media/MediaPlayerInterface.h>
#include <stdio.h>
@@ -36,6 +37,9 @@
#include "utils/Errors.h" // for status_t
#include "utils/KeyedVector.h"
#include "utils/String8.h"
+#include "android_media_MediaDataSource.h"
+#include "android_media_PlaybackSettings.h"
+#include "android_media_SyncSettings.h"
#include "android_media_Utils.h"
#include "android_os_Parcel.h"
@@ -65,6 +69,9 @@ struct fields_t {
};
static fields_t fields;
+static PlaybackSettings::fields_t gPlaybackSettingsFields;
+static SyncSettings::fields_t gSyncSettingsFields;
+
static Mutex sLock;
// ----------------------------------------------------------------------------
@@ -251,6 +258,23 @@ android_media_MediaPlayer_setDataSourceFD(JNIEnv *env, jobject thiz, jobject fil
process_media_player_call( env, thiz, mp->setDataSource(fd, offset, length), "java/io/IOException", "setDataSourceFD failed." );
}
+// JNI: MediaPlayer._setDataSource(MediaDataSource). Wraps the Java
+// MediaDataSource in a JMediaDataSource (an IDataSource that calls back into
+// Java) and hands it to the native player. Throws IllegalStateException when
+// the native player is gone and IllegalArgumentException for a null source.
+static void
+android_media_MediaPlayer_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject dataSource)
+{
+    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+    if (mp == NULL ) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    if (dataSource == NULL) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+    sp<IDataSource> callbackDataSource = new JMediaDataSource(env, dataSource);
+    process_media_player_call(env, thiz, mp->setDataSource(callbackDataSource), "java/lang/RuntimeException", "setDataSourceCallback failed." );
+}
+
static sp<IGraphicBufferProducer>
getVideoSurfaceTexture(JNIEnv* env, jobject thiz) {
IGraphicBufferProducer * const p = (IGraphicBufferProducer*)env->GetLongField(thiz, fields.surface_texture);
@@ -402,15 +426,105 @@ android_media_MediaPlayer_isPlaying(JNIEnv *env, jobject thiz)
}
+// NOTE(review): this hunk replaces the old _setPlaybackRate(float) entry point
+// with the PlaybackSettings/SyncSettings API. Per the TODOs below, only the
+// playback speed is actually forwarded to the native player for now; the other
+// fields are parsed and logged, and the getters return hard-coded defaults.
static void
-android_media_MediaPlayer_setPlaybackRate(JNIEnv *env, jobject thiz, jfloat rate)
+// JNI: setPlaybackSettings(PlaybackSettings). Parses the Java settings object
+// and forwards the speed to the native player.
+android_media_MediaPlayer_setPlaybackSettings(JNIEnv *env, jobject thiz, jobject settings)
+{
+    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+    if (mp == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    PlaybackSettings pbs;
+    pbs.fillFromJobject(env, gPlaybackSettingsFields, settings);
+    ALOGV("setPlaybackSettings: %d:%f %d:%f %d:%u %d:%u",
+            pbs.speedSet, pbs.audioRate.mSpeed,
+            pbs.pitchSet, pbs.audioRate.mPitch,
+            pbs.audioFallbackModeSet, pbs.audioRate.mFallbackMode,
+            pbs.audioStretchModeSet, pbs.audioRate.mStretchMode);
+
+    // TODO: pass playback settings to mediaplayer when audiotrack supports it
+    process_media_player_call(env, thiz, mp->setPlaybackRate(pbs.audioRate.mSpeed), NULL, NULL);
+}
+
+// JNI: getPlaybackSettings(). Returns defaults (speed/pitch 1.0, default
+// fallback/stretch modes) with all "set" flags true, until the native player
+// can report real values.
+static jobject
+android_media_MediaPlayer_getPlaybackSettings(JNIEnv *env, jobject thiz)
+{
+    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+    if (mp == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return NULL;
+    }
+
+    PlaybackSettings pbs;
+    AudioPlaybackRate &audioRate = pbs.audioRate;
+
+    audioRate.mSpeed = 1.0f;
+    audioRate.mPitch = 1.0f;
+    audioRate.mFallbackMode = AUDIO_TIMESTRETCH_FALLBACK_DEFAULT;
+    audioRate.mStretchMode = AUDIO_TIMESTRETCH_STRETCH_DEFAULT;
+
+    // TODO: get this from mediaplayer when audiotrack supports it
+    // process_media_player_call(
+    //         env, thiz, mp->getPlaybackSettings(&audioRate), NULL, NULL);
+    ALOGV("getPlaybackSettings: %f %f %d %d",
+            audioRate.mSpeed, audioRate.mPitch, audioRate.mFallbackMode, audioRate.mStretchMode);
+
+    pbs.speedSet = true;
+    pbs.pitchSet = true;
+    pbs.audioFallbackModeSet = true;
+    pbs.audioStretchModeSet = true;
+
+    return pbs.asJobject(env, gPlaybackSettingsFields);
+}
+
+// JNI: setSyncSettings(SyncSettings). Parse-and-log only for now; nothing is
+// forwarded to the native player yet (see TODO below).
+static void
+android_media_MediaPlayer_setSyncSettings(JNIEnv *env, jobject thiz, jobject settings)
{
    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
    if (mp == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
-    ALOGV("setPlaybackRate: %f", rate);
-    process_media_player_call(env, thiz, mp->setPlaybackRate(rate), NULL, NULL);
+
+    SyncSettings scs;
+    scs.fillFromJobject(env, gSyncSettingsFields, settings);
+    ALOGV("setSyncSettings: %d:%d %d:%d %d:%f %d:%f",
+            scs.syncSourceSet, scs.syncSource,
+            scs.audioAdjustModeSet, scs.audioAdjustMode,
+            scs.toleranceSet, scs.tolerance,
+            scs.frameRateSet, scs.frameRate);
+
+    // TODO: pass sync settings to mediaplayer when it supports it
+    // process_media_player_call(env, thiz, mp->setSyncSettings(scs), NULL, NULL);
+}
+
+// JNI: getSyncSettings(). Returns defaults; note frameRateSet is deliberately
+// left false so the Java side treats the frame rate as unset.
+static jobject
+android_media_MediaPlayer_getSyncSettings(JNIEnv *env, jobject thiz)
+{
+    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+    if (mp == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return NULL;
+    }
+
+    SyncSettings scs;
+    scs.syncSource = 0; // SYNC_SOURCE_DEFAULT
+    scs.audioAdjustMode = 0; // AUDIO_ADJUST_MODE_DEFAULT
+    scs.tolerance = 0.f;
+    scs.frameRate = 0.f;
+
+    // TODO: get this from mediaplayer when it supports it
+    // process_media_player_call(
+    //         env, thiz, mp->getSyncSettings(&scs), NULL, NULL);
+    ALOGV("getSyncSettings: %d %d %f %f",
+            scs.syncSource, scs.audioAdjustMode, scs.tolerance, scs.frameRate);
+
+    scs.syncSourceSet = true;
+    scs.audioAdjustModeSet = true;
+    scs.toleranceSet = true;
+    scs.frameRateSet = false;
+
+    return scs.asJobject(env, gSyncSettingsFields);
}
static void
@@ -679,6 +793,8 @@ android_media_MediaPlayer_native_init(JNIEnv *env)
return;
}
+ env->DeleteLocalRef(clazz);
+
clazz = env->FindClass("android/net/ProxyInfo");
if (clazz == NULL) {
return;
@@ -692,6 +808,11 @@ android_media_MediaPlayer_native_init(JNIEnv *env)
fields.proxyConfigGetExclusionList =
env->GetMethodID(clazz, "getExclusionListAsString", "()Ljava/lang/String;");
+
+ env->DeleteLocalRef(clazz);
+
+ gPlaybackSettingsFields.init(env);
+ gSyncSettingsFields.init(env);
}
static void
@@ -871,7 +992,8 @@ static JNINativeMethod gMethods[] = {
(void *)android_media_MediaPlayer_setDataSourceAndHeaders
},
- {"_setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaPlayer_setDataSourceFD},
+ {"_setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaPlayer_setDataSourceFD},
+ {"_setDataSource", "(Landroid/media/MediaDataSource;)V",(void *)android_media_MediaPlayer_setDataSourceCallback },
{"_setVideoSurface", "(Landroid/view/Surface;)V", (void *)android_media_MediaPlayer_setVideoSurface},
{"_prepare", "()V", (void *)android_media_MediaPlayer_prepare},
{"prepareAsync", "()V", (void *)android_media_MediaPlayer_prepareAsync},
@@ -879,7 +1001,10 @@ static JNINativeMethod gMethods[] = {
{"_stop", "()V", (void *)android_media_MediaPlayer_stop},
{"getVideoWidth", "()I", (void *)android_media_MediaPlayer_getVideoWidth},
{"getVideoHeight", "()I", (void *)android_media_MediaPlayer_getVideoHeight},
- {"_setPlaybackRate", "(F)V", (void *)android_media_MediaPlayer_setPlaybackRate},
+ {"setPlaybackSettings", "(Landroid/media/PlaybackSettings;)V", (void *)android_media_MediaPlayer_setPlaybackSettings},
+ {"getPlaybackSettings", "()Landroid/media/PlaybackSettings;", (void *)android_media_MediaPlayer_getPlaybackSettings},
+ {"setSyncSettings", "(Landroid/media/SyncSettings;)V", (void *)android_media_MediaPlayer_setSyncSettings},
+ {"getSyncSettings", "()Landroid/media/SyncSettings;", (void *)android_media_MediaPlayer_getSyncSettings},
{"seekTo", "(I)V", (void *)android_media_MediaPlayer_seekTo},
{"_pause", "()V", (void *)android_media_MediaPlayer_pause},
{"isPlaying", "()Z", (void *)android_media_MediaPlayer_isPlaying},
@@ -914,8 +1039,8 @@ static int register_android_media_MediaPlayer(JNIEnv *env)
return AndroidRuntime::registerNativeMethods(env,
"android/media/MediaPlayer", gMethods, NELEM(gMethods));
}
-
extern int register_android_media_ImageReader(JNIEnv *env);
+extern int register_android_media_ImageWriter(JNIEnv *env);
extern int register_android_media_Crypto(JNIEnv *env);
extern int register_android_media_Drm(JNIEnv *env);
extern int register_android_media_MediaCodec(JNIEnv *env);
@@ -926,6 +1051,7 @@ extern int register_android_media_MediaMetadataRetriever(JNIEnv *env);
extern int register_android_media_MediaMuxer(JNIEnv *env);
extern int register_android_media_MediaRecorder(JNIEnv *env);
extern int register_android_media_MediaScanner(JNIEnv *env);
+extern int register_android_media_MediaSync(JNIEnv *env);
extern int register_android_media_ResampleInputStream(JNIEnv *env);
extern int register_android_media_MediaProfiles(JNIEnv *env);
extern int register_android_media_AmrInputStream(JNIEnv *env);
@@ -944,6 +1070,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
}
assert(env != NULL);
+ if (register_android_media_ImageWriter(env) != JNI_OK) {
+ ALOGE("ERROR: ImageWriter native registration failed");
+ goto bail;
+ }
+
if (register_android_media_ImageReader(env) < 0) {
ALOGE("ERROR: ImageReader native registration failed");
goto bail;
@@ -1004,6 +1135,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
goto bail;
}
+ if (register_android_media_MediaSync(env) < 0) {
+ ALOGE("ERROR: MediaSync native registration failed");
+ goto bail;
+ }
+
if (register_android_media_MediaExtractor(env) < 0) {
ALOGE("ERROR: MediaCodec native registration failed");
goto bail;
diff --git a/media/jni/android_media_MediaSync.cpp b/media/jni/android_media_MediaSync.cpp
new file mode 100644
index 0000000..f192262
--- /dev/null
+++ b/media/jni/android_media_MediaSync.cpp
@@ -0,0 +1,431 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSync-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaSync.h"
+
+#include "android_media_AudioTrack.h"
+#include "android_media_SyncSettings.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/android_view_Surface.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <gui/Surface.h>
+
+#include <media/AudioTrack.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <nativehelper/ScopedLocalRef.h>
+
+namespace android {
+
+// JNI IDs resolved once in android_media_MediaSync_native_init() and cached
+// for the lifetime of the process.
+struct fields_t {
+    jfieldID context;                      // MediaSync.mNativeContext (long)
+    jfieldID mediaTimestampMediaTimeUsID;  // MediaTimestamp.mediaTimeUs
+    jfieldID mediaTimestampNanoTimeID;     // MediaTimestamp.nanoTime
+    jfieldID mediaTimestampClockRateID;    // MediaTimestamp.clockRate
+};
+
+static fields_t gFields;
+static SyncSettings::fields_t gSyncSettingsFields;
+
+////////////////////////////////////////////////////////////////////////////////
+
+// JMediaSync: thin forwarding wrapper that owns the native MediaSync instance
+// and exposes the subset of its API the JNI entry points below need.
+JMediaSync::JMediaSync() {
+    mSync = MediaSync::create();
+}
+
+JMediaSync::~JMediaSync() {
+}
+
+// Routes decoded video frames to the given output surface.
+status_t JMediaSync::configureSurface(const sp<IGraphicBufferProducer> &bufferProducer) {
+    return mSync->configureSurface(bufferProducer);
+}
+
+// Associates the audio track whose position drives the media clock.
+status_t JMediaSync::configureAudioTrack(
+        const sp<AudioTrack> &audioTrack,
+        int32_t nativeSampleRateInHz) {
+    return mSync->configureAudioTrack(audioTrack, nativeSampleRateInHz);
+}
+
+// Creates the producer side of the input surface the app renders into.
+status_t JMediaSync::createInputSurface(
+        sp<IGraphicBufferProducer>* bufferProducer) {
+    return mSync->createInputSurface(bufferProducer);
+}
+
+status_t JMediaSync::setPlaybackRate(float rate) {
+    return mSync->setPlaybackRate(rate);
+}
+
+// May return NULL if the sync engine has no clock yet (see native_getTimestamp).
+sp<const MediaClock> JMediaSync::getMediaClock() {
+    return mSync->getMediaClock();
+}
+
+// Reports audio data queued to the track so the engine can track audio position.
+status_t JMediaSync::updateQueuedAudioData(
+        int sizeInBytes, int64_t presentationTimeUs) {
+    return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
+}
+
+status_t JMediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) {
+    return mSync->getPlayTimeForPendingAudioFrames(outTimeUs);
+}
+
+} // namespace android
+
+////////////////////////////////////////////////////////////////////////////////
+
+using namespace android;
+
+// Stores |sync| in the Java object's mNativeContext field, adjusting RefBase
+// strong counts (incStrong new, decStrong old), and returns the previous
+// instance. Passing NULL releases the currently held instance.
+static sp<JMediaSync> setMediaSync(JNIEnv *env, jobject thiz, const sp<JMediaSync> &sync) {
+    sp<JMediaSync> old = (JMediaSync *)env->GetLongField(thiz, gFields.context);
+    if (sync != NULL) {
+        sync->incStrong(thiz);
+    }
+    if (old != NULL) {
+        old->decStrong(thiz);
+    }
+
+    env->SetLongField(thiz, gFields.context, (jlong)sync.get());
+
+    return old;
+}
+
+// Reads the JMediaSync pointer back out of mNativeContext. Returns NULL after
+// native_release; every caller must check for that.
+static sp<JMediaSync> getMediaSync(JNIEnv *env, jobject thiz) {
+    return (JMediaSync *)env->GetLongField(thiz, gFields.context);
+}
+
+// JNI: native_release — drops the strong reference held in mNativeContext.
+static void android_media_MediaSync_release(JNIEnv *env, jobject thiz) {
+    setMediaSync(env, thiz, NULL);
+}
+
+// Maps a native status_t to a Java exception: BAD_VALUE ->
+// IllegalArgumentException; any other negative code (NO_INIT,
+// INVALID_OPERATION, ...) -> IllegalStateException with the numeric code
+// appended. NO_ERROR and positive codes throw nothing.
+static void throwExceptionAsNecessary(
+        JNIEnv *env, status_t err, const char *msg = NULL) {
+    switch (err) {
+        case NO_ERROR:
+            break;
+
+        case BAD_VALUE:
+            jniThrowException(env, "java/lang/IllegalArgumentException", msg);
+            break;
+
+        case NO_INIT:
+        case INVALID_OPERATION:
+        default:
+            if (err > 0) {
+                break;
+            }
+            // msg defaults to NULL and several call sites rely on that
+            // default; guard before constructing the AString, whose
+            // const char* constructor must not be handed a null pointer.
+            AString msgWithErrorCode(msg == NULL ? "" : msg);
+            msgWithErrorCode.append(" error:");
+            msgWithErrorCode.append(err);
+            jniThrowException(env, "java/lang/IllegalStateException", msgWithErrorCode.c_str());
+            break;
+    }
+}
+
+// JNI: native_configureSurface — routes video output to |jsurface| (or
+// detaches when null). A released Surface raises IllegalArgumentException; a
+// surface that was already configured raises IllegalStateException.
+static void android_media_MediaSync_native_configureSurface(
+        JNIEnv *env, jobject thiz, jobject jsurface) {
+    ALOGV("android_media_MediaSync_configureSurface");
+
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sp<IGraphicBufferProducer> bufferProducer;
+    if (jsurface != NULL) {
+        sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+        if (surface != NULL) {
+            bufferProducer = surface->getIGraphicBufferProducer();
+        } else {
+            throwExceptionAsNecessary(env, BAD_VALUE, "The surface has been released");
+            return;
+        }
+    }
+
+    status_t err = sync->configureSurface(bufferProducer);
+
+    if (err == INVALID_OPERATION) {
+        throwExceptionAsNecessary(
+                env, INVALID_OPERATION, "Surface has already been configured");
+    } else if (err != NO_ERROR) {
+        // Fix: original read "} if (err != NO_ERROR)" — the missing "else"
+        // made this branch also execute for INVALID_OPERATION, throwing a
+        // second exception over the one already pending.
+        AString msg("Failed to connect to surface with error ");
+        msg.append(err);
+        throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
+    }
+}
+
+// JNI: native_configureAudioTrack — associates |jaudioTrack| (or detaches when
+// null) at the given native sample rate. A released track raises
+// IllegalArgumentException; double configuration raises IllegalStateException.
+static void android_media_MediaSync_native_configureAudioTrack(
+        JNIEnv *env, jobject thiz, jobject jaudioTrack, jint nativeSampleRateInHz) {
+    ALOGV("android_media_MediaSync_configureAudioTrack");
+
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sp<AudioTrack> audioTrack;
+    if (jaudioTrack != NULL) {
+        audioTrack = android_media_AudioTrack_getAudioTrack(env, jaudioTrack);
+        if (audioTrack == NULL) {
+            throwExceptionAsNecessary(env, BAD_VALUE, "The audio track has been released");
+            return;
+        }
+    }
+
+    status_t err = sync->configureAudioTrack(audioTrack, nativeSampleRateInHz);
+
+    if (err == INVALID_OPERATION) {
+        throwExceptionAsNecessary(
+                env, INVALID_OPERATION, "Audio track has already been configured");
+    } else if (err != NO_ERROR) {
+        // Fix: original read "} if (err != NO_ERROR)" — the missing "else"
+        // made this branch also execute for INVALID_OPERATION, throwing a
+        // second exception over the one already pending.
+        AString msg("Failed to configure audio track with error ");
+        msg.append(err);
+        throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
+    }
+}
+
+// JNI: createInputSurface — asks the native MediaSync for an input
+// IGraphicBufferProducer and wraps it in a Java Surface for the app to render
+// into. Returns NULL (with an exception pending) on failure.
+static jobject android_media_MediaSync_createInputSurface(
+        JNIEnv* env, jobject thiz) {
+    ALOGV("android_media_MediaSync_createInputSurface");
+
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    // Tell the MediaSync that we want to use a Surface as input.
+    sp<IGraphicBufferProducer> bufferProducer;
+    status_t err = sync->createInputSurface(&bufferProducer);
+    if (err != NO_ERROR) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    // Wrap the IGBP in a Java-language Surface.
+    return android_view_Surface_createFromIGraphicBufferProducer(env,
+            bufferProducer);
+}
+
+// JNI: native_updateQueuedAudioData — reports how many bytes of audio were
+// queued and their presentation time so the engine can track audio position.
+static void android_media_MediaSync_native_updateQueuedAudioData(
+        JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) {
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
+    if (err != NO_ERROR) {
+        throwExceptionAsNecessary(env, err);
+        return;
+    }
+}
+
+// JNI: native_getTimestamp — fills the Java MediaTimestamp with the current
+// media time (queried from the MediaClock at "now"), the corresponding system
+// time in nanoseconds, and the clock's playback rate. Returns false when no
+// clock exists yet or no media time is available.
+static jboolean android_media_MediaSync_native_getTimestamp(
+        JNIEnv *env, jobject thiz, jobject timestamp) {
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return JNI_FALSE;
+    }
+
+    sp<const MediaClock> mediaClock = sync->getMediaClock();
+    if (mediaClock == NULL) {
+        return JNI_FALSE;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t mediaUs = 0;
+    if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
+        return JNI_FALSE;
+    }
+
+    env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID,
+            (jlong)mediaUs);
+    // nanoTime is the same "now" used for the clock query, scaled us -> ns.
+    env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID,
+            (jlong)(nowUs * 1000));
+    env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID,
+            (jfloat)mediaClock->getPlaybackRate());
+    return JNI_TRUE;
+}
+
+// JNI: native_getPlayTimeForPendingAudioFrames — returns the time at which the
+// currently pending audio frames will have played out (microseconds).
+static jlong android_media_MediaSync_native_getPlayTimeForPendingAudioFrames(
+        JNIEnv *env, jobject thiz) {
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        // Fix: must bail out here. The original fell through and dereferenced
+        // the NULL sync below while a Java exception was already pending.
+        return 0;
+    }
+
+    int64_t playTimeUs = 0;
+    status_t err = sync->getPlayTimeForPendingAudioFrames(&playTimeUs);
+    if (err != NO_ERROR) {
+        throwExceptionAsNecessary(env, err);
+    }
+    return (jlong)playTimeUs;
+}
+
+// JNI: setSyncSettings(SyncSettings). Currently parse-and-log only; nothing is
+// forwarded to the native MediaSync yet (see TODO below).
+static void
+android_media_MediaSync_setSyncSettings(JNIEnv *env, jobject thiz, jobject settings)
+{
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    SyncSettings scs;
+    scs.fillFromJobject(env, gSyncSettingsFields, settings);
+    ALOGV("setSyncSettings: %d:%d %d:%d %d:%f %d:%f",
+            scs.syncSourceSet, scs.syncSource,
+            scs.audioAdjustModeSet, scs.audioAdjustMode,
+            scs.toleranceSet, scs.tolerance,
+            scs.frameRateSet, scs.frameRate);
+
+    // TODO: pass sync settings to mediasync when it supports it
+}
+
+// JNI: getSyncSettings(). Returns hard-coded defaults until the native engine
+// can report them; frameRateSet is deliberately left false so the Java side
+// treats the frame rate as unset. Mirrors the MediaPlayer variant.
+static jobject
+android_media_MediaSync_getSyncSettings(JNIEnv *env, jobject thiz)
+{
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return NULL;
+    }
+
+    SyncSettings scs;
+    scs.syncSource = 0; // SYNC_SOURCE_DEFAULT
+    scs.audioAdjustMode = 0; // AUDIO_ADJUST_MODE_DEFAULT
+    scs.tolerance = 0.f;
+    scs.frameRate = 0.f;
+
+    // TODO: get this from mediaplayer when it supports it
+    // process_media_player_call(
+    //         env, thiz, mp->getSyncSettings(&scs), NULL, NULL);
+    ALOGV("getSyncSettings: %d %d %f %f",
+            scs.syncSource, scs.audioAdjustMode, scs.tolerance, scs.frameRate);
+
+    scs.syncSourceSet = true;
+    scs.audioAdjustModeSet = true;
+    scs.toleranceSet = true;
+    scs.frameRateSet = false;
+
+    return scs.asJobject(env, gSyncSettingsFields);
+}
+
+// JNI: native_init — resolves and caches the field IDs for
+// MediaSync.mNativeContext and the MediaTimestamp fields, then initializes the
+// SyncSettings ID cache. CHECK aborts if the Java classes are out of sync with
+// this file (programming error, not a runtime condition).
+static void android_media_MediaSync_native_init(JNIEnv *env) {
+    ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync"));
+    CHECK(clazz.get() != NULL);
+
+    gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J");
+    CHECK(gFields.context != NULL);
+
+    clazz.reset(env->FindClass("android/media/MediaTimestamp"));
+    CHECK(clazz.get() != NULL);
+
+    gFields.mediaTimestampMediaTimeUsID =
+        env->GetFieldID(clazz.get(), "mediaTimeUs", "J");
+    CHECK(gFields.mediaTimestampMediaTimeUsID != NULL);
+
+    gFields.mediaTimestampNanoTimeID =
+        env->GetFieldID(clazz.get(), "nanoTime", "J");
+    CHECK(gFields.mediaTimestampNanoTimeID != NULL);
+
+    gFields.mediaTimestampClockRateID =
+        env->GetFieldID(clazz.get(), "clockRate", "F");
+    CHECK(gFields.mediaTimestampClockRateID != NULL);
+
+    gSyncSettingsFields.init(env);
+}
+
+// JNI: native_setup — creates the native JMediaSync and stores it in
+// mNativeContext (setMediaSync takes the strong reference).
+static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) {
+    sp<JMediaSync> sync = new JMediaSync();
+
+    setMediaSync(env, thiz, sync);
+}
+
+// JNI: native_setPlaybackRate — forwards the playback rate to the native
+// MediaSync; errors surface as Java exceptions via throwExceptionAsNecessary.
+static void android_media_MediaSync_native_setPlaybackRate(
+        JNIEnv *env, jobject thiz, jfloat rate) {
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    status_t err = sync->setPlaybackRate(rate);
+    if (err != NO_ERROR) {
+        throwExceptionAsNecessary(env, err);
+        return;
+    }
+}
+
+// JNI: native_finalize — safety net for finalization; same as native_release.
+static void android_media_MediaSync_native_finalize(JNIEnv *env, jobject thiz) {
+    android_media_MediaSync_release(env, thiz);
+}
+
+// Table binding android.media.MediaSync's native methods to the
+// implementations above; registered with the runtime in
+// register_android_media_MediaSync(), called from JNI_OnLoad.
+static JNINativeMethod gMethods[] = {
+    { "native_configureSurface",
+      "(Landroid/view/Surface;)V",
+      (void *)android_media_MediaSync_native_configureSurface },
+
+    { "native_configureAudioTrack",
+      "(Landroid/media/AudioTrack;I)V",
+      (void *)android_media_MediaSync_native_configureAudioTrack },
+
+    { "createInputSurface", "()Landroid/view/Surface;",
+      (void *)android_media_MediaSync_createInputSurface },
+
+    { "native_updateQueuedAudioData",
+      "(IJ)V",
+      (void *)android_media_MediaSync_native_updateQueuedAudioData },
+
+    { "native_getTimestamp",
+      "(Landroid/media/MediaTimestamp;)Z",
+      (void *)android_media_MediaSync_native_getTimestamp },
+
+    { "native_getPlayTimeForPendingAudioFrames",
+      "()J",
+      (void *)android_media_MediaSync_native_getPlayTimeForPendingAudioFrames },
+
+    { "native_init", "()V", (void *)android_media_MediaSync_native_init },
+
+    { "native_setup", "()V", (void *)android_media_MediaSync_native_setup },
+
+    { "native_release", "()V", (void *)android_media_MediaSync_release },
+
+    { "native_setPlaybackRate", "(F)V", (void *)android_media_MediaSync_native_setPlaybackRate },
+
+    { "setSyncSettings", "(Landroid/media/SyncSettings;)V", (void *)android_media_MediaSync_setSyncSettings},
+
+    { "getSyncSettings", "()Landroid/media/SyncSettings;", (void *)android_media_MediaSync_getSyncSettings},
+
+    { "native_finalize", "()V", (void *)android_media_MediaSync_native_finalize },
+};
+
+int register_android_media_MediaSync(JNIEnv *env) {
+    return AndroidRuntime::registerNativeMethods(
+                   env, "android/media/MediaSync", gMethods, NELEM(gMethods));
+}
diff --git a/media/jni/android_media_MediaSync.h b/media/jni/android_media_MediaSync.h
new file mode 100644
index 0000000..cf81a72
--- /dev/null
+++ b/media/jni/android_media_MediaSync.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIASYNC_H_
+#define _ANDROID_MEDIA_MEDIASYNC_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class AudioTrack;
+struct IGraphicBufferProducer;
+struct MediaClock;
+class MediaSync;
+
+// Refcounted holder for the native MediaSync instance; one per Java MediaSync
+// object, owned via the mNativeContext field. All methods forward to mSync
+// (see android_media_MediaSync.cpp).
+struct JMediaSync : public RefBase {
+    JMediaSync();
+
+    // Attaches the video output surface.
+    status_t configureSurface(const sp<IGraphicBufferProducer> &bufferProducer);
+    // Attaches the audio track driving the media clock.
+    status_t configureAudioTrack(
+            const sp<AudioTrack> &audioTrack, int32_t nativeSampleRateInHz);
+
+    // Creates the producer side of the app-facing input surface.
+    status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
+
+    // Reports audio queued to the track (bytes + presentation time in us).
+    status_t updateQueuedAudioData(int sizeInBytes, int64_t presentationTimeUs);
+
+    status_t setPlaybackRate(float rate);
+
+    // Time (us) at which currently pending audio frames will have played out.
+    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);
+
+    // May be NULL when the engine has no clock yet.
+    sp<const MediaClock> getMediaClock();
+
+protected:
+    // RefBase-managed: destroyed when the last strong reference drops.
+    virtual ~JMediaSync();
+
+private:
+    sp<MediaSync> mSync;
+
+    DISALLOW_EVIL_CONSTRUCTORS(JMediaSync);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIASYNC_H_
diff --git a/media/jni/android_media_PlaybackSettings.h b/media/jni/android_media_PlaybackSettings.h
new file mode 100644
index 0000000..1f4f256
--- /dev/null
+++ b/media/jni/android_media_PlaybackSettings.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_PLAYBACK_SETTINGS_H_
+#define _ANDROID_MEDIA_PLAYBACK_SETTINGS_H_
+
+#include <media/AudioResamplerPublic.h>
+
+namespace android {
+
+// This entire class is inline as it is used from both core and media
+// Native mirror of android.media.PlaybackSettings: caches the class/field IDs
+// once (fields_t::init) and converts between the Java object and the native
+// AudioPlaybackRate plus per-field "set" flags.
+// NOTE(review): this header uses JNI types (jclass, jfieldID, JNIEnv) but only
+// includes AudioResamplerPublic.h — it appears to rely on the including .cpp
+// to provide jni.h; confirm.
+struct PlaybackSettings {
+    AudioPlaybackRate audioRate;  // speed / pitch / fallback / stretch values
+    bool speedSet;                // corresponding SET_* bit present in mSet
+    bool pitchSet;
+    bool audioFallbackModeSet;
+    bool audioStretchModeSet;
+
+    struct fields_t {
+        jclass clazz;           // global ref to android.media.PlaybackSettings
+        jmethodID constructID;  // no-arg constructor
+
+        jfieldID speed;
+        jfieldID pitch;
+        jfieldID audio_fallback_mode;
+        jfieldID audio_stretch_mode;
+        jfieldID set;           // bit mask of which fields are set
+        jint set_speed;         // values of the Java SET_* constants
+        jint set_pitch;
+        jint set_audio_fallback_mode;
+        jint set_audio_stretch_mode;
+
+        // Caches class/method/field IDs and SET_* constants. Call once before
+        // any conversion. NOTE(review): lookups after the global-ref check are
+        // unchecked; a mismatched Java class would leave a pending exception.
+        void init(JNIEnv *env) {
+            jclass lclazz = env->FindClass("android/media/PlaybackSettings");
+            if (lclazz == NULL) {
+                return;
+            }
+
+            clazz = (jclass)env->NewGlobalRef(lclazz);
+            if (clazz == NULL) {
+                return;
+            }
+
+            constructID = env->GetMethodID(clazz, "<init>", "()V");
+
+            speed = env->GetFieldID(clazz, "mSpeed", "F");
+            pitch = env->GetFieldID(clazz, "mPitch", "F");
+            audio_fallback_mode = env->GetFieldID(clazz, "mAudioFallbackMode", "I");
+            audio_stretch_mode = env->GetFieldID(clazz, "mAudioStretchMode", "I");
+            set = env->GetFieldID(clazz, "mSet", "I");
+
+            set_speed =
+                env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_SPEED", "I"));
+            set_pitch =
+                env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_PITCH", "I"));
+            set_audio_fallback_mode = env->GetStaticIntField(
+                    clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_FALLBACK_MODE", "I"));
+            set_audio_stretch_mode = env->GetStaticIntField(
+                    clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_STRETCH_MODE", "I"));
+
+            env->DeleteLocalRef(lclazz);
+        }
+
+        // Releases the global class reference (inverse of init).
+        void exit(JNIEnv *env) {
+            env->DeleteGlobalRef(clazz);
+            clazz = NULL;
+        }
+    };
+
+    // Populates this struct from a Java PlaybackSettings object; the *Set
+    // flags are derived from the mSet bit mask.
+    void fillFromJobject(JNIEnv *env, const fields_t& fields, jobject settings) {
+        audioRate.mSpeed = env->GetFloatField(settings, fields.speed);
+        audioRate.mPitch = env->GetFloatField(settings, fields.pitch);
+        audioRate.mFallbackMode =
+            (AudioTimestretchFallbackMode)env->GetIntField(settings, fields.audio_fallback_mode);
+        audioRate.mStretchMode =
+            (AudioTimestretchStretchMode)env->GetIntField(settings, fields.audio_stretch_mode);
+        int set = env->GetIntField(settings, fields.set);
+
+        speedSet = set & fields.set_speed;
+        pitchSet = set & fields.set_pitch;
+        audioFallbackModeSet = set & fields.set_audio_fallback_mode;
+        audioStretchModeSet = set & fields.set_audio_stretch_mode;
+    }
+
+    // Creates a new Java PlaybackSettings object reflecting this struct, or
+    // NULL on allocation failure (exception pending).
+    jobject asJobject(JNIEnv *env, const fields_t& fields) {
+        jobject settings = env->NewObject(fields.clazz, fields.constructID);
+        if (settings == NULL) {
+            return NULL;
+        }
+        env->SetFloatField(settings, fields.speed, (jfloat)audioRate.mSpeed);
+        env->SetFloatField(settings, fields.pitch, (jfloat)audioRate.mPitch);
+        env->SetIntField(settings, fields.audio_fallback_mode, (jint)audioRate.mFallbackMode);
+        env->SetIntField(settings, fields.audio_stretch_mode, (jint)audioRate.mStretchMode);
+        env->SetIntField(
+                settings, fields.set,
+                (speedSet ? fields.set_speed : 0)
+                        | (pitchSet ? fields.set_pitch : 0)
+                        | (audioFallbackModeSet ? fields.set_audio_fallback_mode : 0)
+                        | (audioStretchModeSet ? fields.set_audio_stretch_mode : 0));
+
+        return settings;
+    }
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_PLAYBACK_SETTINGS_H_
diff --git a/media/jni/android_media_SyncSettings.cpp b/media/jni/android_media_SyncSettings.cpp
new file mode 100644
index 0000000..2f0605e
--- /dev/null
+++ b/media/jni/android_media_SyncSettings.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "android_media_SyncSettings.h"
+
+#include "JNIHelp.h"
+
+namespace android {
+
+// Caches the android.media.SyncSettings class (global ref), its instance field
+// IDs, and the Java SET_* bit-mask constants. Must run before
+// fillFromJobject/asJobject. NOTE(review): lookups after the global-ref check
+// are unchecked; a mismatched Java class would leave a pending exception.
+void SyncSettings::fields_t::init(JNIEnv *env) {
+    jclass lclazz = env->FindClass("android/media/SyncSettings");
+    if (lclazz == NULL) {
+        return;
+    }
+
+    clazz = (jclass)env->NewGlobalRef(lclazz);
+    if (clazz == NULL) {
+        return;
+    }
+
+    constructID = env->GetMethodID(clazz, "<init>", "()V");
+
+    sync_source = env->GetFieldID(clazz, "mSyncSource", "I");
+    audio_adjust_mode = env->GetFieldID(clazz, "mAudioAdjustMode", "I");
+    tolerance = env->GetFieldID(clazz, "mTolerance", "F");
+    frame_rate = env->GetFieldID(clazz, "mFrameRate", "F");
+    set = env->GetFieldID(clazz, "mSet", "I");
+
+    set_sync_source =
+        env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_SYNC_SOURCE", "I"));
+    set_audio_adjust_mode = env->GetStaticIntField(
+            clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_ADJUST_MODE", "I"));
+    set_tolerance =
+        env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_TOLERANCE", "I"));
+    set_frame_rate =
+        env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_FRAME_RATE", "I"));
+
+    env->DeleteLocalRef(lclazz);
+}
+
+// Releases the global class reference taken by init().
+void SyncSettings::fields_t::exit(JNIEnv *env) {
+    env->DeleteGlobalRef(clazz);
+    clazz = NULL;
+}
+
+// Populates this struct from a Java SyncSettings object; the *Set flags are
+// derived from the mSet bit mask.
+void SyncSettings::fillFromJobject(JNIEnv *env, const fields_t& fields, jobject settings) {
+    syncSource = env->GetIntField(settings, fields.sync_source);
+    audioAdjustMode = env->GetIntField(settings, fields.audio_adjust_mode);
+    tolerance = env->GetFloatField(settings, fields.tolerance);
+    frameRate = env->GetFloatField(settings, fields.frame_rate);
+    int set = env->GetIntField(settings, fields.set);
+
+    syncSourceSet = set & fields.set_sync_source;
+    audioAdjustModeSet = set & fields.set_audio_adjust_mode;
+    toleranceSet = set & fields.set_tolerance;
+    frameRateSet = set & fields.set_frame_rate;
+}
+
+// Creates a new Java SyncSettings object reflecting this struct, composing the
+// mSet bit mask from the *Set flags. Returns NULL on allocation failure
+// (exception pending).
+jobject SyncSettings::asJobject(JNIEnv *env, const fields_t& fields) {
+    jobject settings = env->NewObject(fields.clazz, fields.constructID);
+    if (settings == NULL) {
+        return NULL;
+    }
+    env->SetIntField(settings, fields.sync_source, (jint)syncSource);
+    env->SetIntField(settings, fields.audio_adjust_mode, (jint)audioAdjustMode);
+    env->SetFloatField(settings, fields.tolerance, (jfloat)tolerance);
+    env->SetFloatField(settings, fields.frame_rate, (jfloat)frameRate);
+    env->SetIntField(
+            settings, fields.set,
+            (syncSourceSet ? fields.set_sync_source : 0)
+                    | (audioAdjustModeSet ? fields.set_audio_adjust_mode : 0)
+                    | (toleranceSet ? fields.set_tolerance : 0)
+                    | (frameRateSet ? fields.set_frame_rate : 0));
+
+    return settings;
+}
+
+} // namespace android
diff --git a/media/jni/android_media_SyncSettings.h b/media/jni/android_media_SyncSettings.h
new file mode 100644
index 0000000..586533f
--- /dev/null
+++ b/media/jni/android_media_SyncSettings.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_SYNC_SETTINGS_H_
+#define _ANDROID_MEDIA_SYNC_SETTINGS_H_
+
+#include "jni.h"
+
+namespace android {
+
// Native mirror of an android.media.SyncSettings object.
// fillFromJobject() copies a Java instance into these plain fields;
// asJobject() converts back to a new Java object.
struct SyncSettings {
    // keep this here until it is implemented
    int syncSource;
    int audioAdjustMode;
    float tolerance;
    float frameRate;

    // True when the corresponding member above carries an explicit value,
    // decoded from / encoded into the Java object's "set" bit mask.
    bool syncSourceSet;
    bool audioAdjustModeSet;
    bool toleranceSet;
    bool frameRateSet;

    // Cached JNI handles for the android.media.SyncSettings class.
    struct fields_t {
        jclass clazz;           // global ref; released by exit()
        jmethodID constructID;  // no-arg constructor

        // instance field IDs
        jfieldID sync_source;
        jfieldID audio_adjust_mode;
        jfieldID tolerance;
        jfieldID frame_rate;
        jfieldID set;           // bit mask of which members are set
        // values of the Java SET_* bit-mask constants
        jint set_sync_source;
        jint set_audio_adjust_mode;
        jint set_tolerance;
        jint set_frame_rate;

        // initializes fields
        void init(JNIEnv *env);

        // releases global references held
        void exit(JNIEnv *env);
    };

    // fills this from an android.media.SyncSettings object
    void fillFromJobject(JNIEnv *env, const fields_t& fields, jobject settings);

    // returns this as a android.media.SyncSettings object
    jobject asJobject(JNIEnv *env, const fields_t& fields);
};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_SYNC_SETTINGS_H_
diff --git a/media/jni/soundpool/Android.mk b/media/jni/soundpool/Android.mk
index 71ab013..2476056 100644
--- a/media/jni/soundpool/Android.mk
+++ b/media/jni/soundpool/Android.mk
@@ -2,7 +2,7 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- android_media_SoundPool_SoundPoolImpl.cpp \
+ android_media_SoundPool.cpp \
SoundPool.cpp \
SoundPoolThread.cpp
diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp
index 10233f3..25c6154 100644
--- a/media/jni/soundpool/SoundPool.cpp
+++ b/media/jni/soundpool/SoundPool.cpp
@@ -716,7 +716,15 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
}
#endif
- if (!mAudioTrack.get() || mPrevSampleID != sample->sampleID()) {
+ // check if the existing track has the same sample id.
+ if (mAudioTrack != 0 && mPrevSampleID == sample->sampleID()) {
+ // the sample rate may fail to change if the audio track is a fast track.
+ if (mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
+ newTrack = mAudioTrack;
+ ALOGV("reusing track %p for sample %d", mAudioTrack.get(), sample->sampleID());
+ }
+ }
+ if (newTrack == 0) {
// mToggle toggles each time a track is started on a given channel.
// The toggle is concatenated with the SoundChannel address and passed to AudioTrack
// as callback user data. This enables the detection of callbacks received from the old
@@ -746,10 +754,6 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
mToggle = toggle;
mAudioTrack = newTrack;
ALOGV("using new track %p for sample %d", newTrack.get(), sample->sampleID());
- } else {
- newTrack = mAudioTrack;
- newTrack->setSampleRate(sampleRate);
- ALOGV("reusing track %p for sample %d", mAudioTrack.get(), sample->sampleID());
}
newTrack->setVolume(leftVolume, rightVolume);
newTrack->setLoop(0, frameCount, loop);
diff --git a/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp b/media/jni/soundpool/android_media_SoundPool.cpp
index b2333f8..fc4cf05 100644
--- a/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp
+++ b/media/jni/soundpool/android_media_SoundPool.cpp
@@ -47,10 +47,10 @@ static audio_attributes_fields_t javaAudioAttrFields;
// ----------------------------------------------------------------------------
static jint
-android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
+android_media_SoundPool_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
jlong offset, jlong length, jint priority)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_load_FD");
+ ALOGV("android_media_SoundPool_load_FD");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return (jint) ap->load(jniGetFDFromFileDescriptor(env, fileDescriptor),
@@ -58,104 +58,104 @@ android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject
}
static jboolean
-android_media_SoundPool_SoundPoolImpl_unload(JNIEnv *env, jobject thiz, jint sampleID) {
- ALOGV("android_media_SoundPool_SoundPoolImpl_unload\n");
+android_media_SoundPool_unload(JNIEnv *env, jobject thiz, jint sampleID) {
+ ALOGV("android_media_SoundPool_unload\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return JNI_FALSE;
return ap->unload(sampleID) ? JNI_TRUE : JNI_FALSE;
}
static jint
-android_media_SoundPool_SoundPoolImpl_play(JNIEnv *env, jobject thiz, jint sampleID,
+android_media_SoundPool_play(JNIEnv *env, jobject thiz, jint sampleID,
jfloat leftVolume, jfloat rightVolume, jint priority, jint loop,
jfloat rate)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_play\n");
+ ALOGV("android_media_SoundPool_play\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return (jint) ap->play(sampleID, leftVolume, rightVolume, priority, loop, rate);
}
static void
-android_media_SoundPool_SoundPoolImpl_pause(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_pause(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_pause");
+ ALOGV("android_media_SoundPool_pause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->pause(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_resume(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_resume(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_resume");
+ ALOGV("android_media_SoundPool_resume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->resume(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_autoPause(JNIEnv *env, jobject thiz)
+android_media_SoundPool_autoPause(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_autoPause");
+ ALOGV("android_media_SoundPool_autoPause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoPause();
}
static void
-android_media_SoundPool_SoundPoolImpl_autoResume(JNIEnv *env, jobject thiz)
+android_media_SoundPool_autoResume(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_autoResume");
+ ALOGV("android_media_SoundPool_autoResume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoResume();
}
static void
-android_media_SoundPool_SoundPoolImpl_stop(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_stop(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_stop");
+ ALOGV("android_media_SoundPool_stop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->stop(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_setVolume(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setVolume(JNIEnv *env, jobject thiz, jint channelID,
jfloat leftVolume, jfloat rightVolume)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setVolume");
+ ALOGV("android_media_SoundPool_setVolume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setVolume(channelID, (float) leftVolume, (float) rightVolume);
}
static void
-android_media_SoundPool_SoundPoolImpl_setPriority(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setPriority(JNIEnv *env, jobject thiz, jint channelID,
jint priority)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setPriority");
+ ALOGV("android_media_SoundPool_setPriority");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setPriority(channelID, (int) priority);
}
static void
-android_media_SoundPool_SoundPoolImpl_setLoop(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setLoop(JNIEnv *env, jobject thiz, jint channelID,
int loop)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setLoop");
+ ALOGV("android_media_SoundPool_setLoop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setLoop(channelID, loop);
}
static void
-android_media_SoundPool_SoundPoolImpl_setRate(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setRate(JNIEnv *env, jobject thiz, jint channelID,
jfloat rate)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setRate");
+ ALOGV("android_media_SoundPool_setRate");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setRate(channelID, (float) rate);
@@ -169,7 +169,7 @@ static void android_media_callback(SoundPoolEvent event, SoundPool* soundPool, v
}
static jint
-android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
+android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
jint maxChannels, jobject jaa)
{
if (jaa == 0) {
@@ -191,7 +191,7 @@ android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jo
(audio_content_type_t) env->GetIntField(jaa, javaAudioAttrFields.fieldContentType);
paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags);
- ALOGV("android_media_SoundPool_SoundPoolImpl_native_setup");
+ ALOGV("android_media_SoundPool_native_setup");
SoundPool *ap = new SoundPool(maxChannels, paa);
if (ap == NULL) {
return -1;
@@ -211,9 +211,9 @@ android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jo
}
static void
-android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
+android_media_SoundPool_release(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_release");
+ ALOGV("android_media_SoundPool_release");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap != NULL) {
@@ -236,63 +236,63 @@ android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
static JNINativeMethod gMethods[] = {
{ "_load",
"(Ljava/io/FileDescriptor;JJI)I",
- (void *)android_media_SoundPool_SoundPoolImpl_load_FD
+ (void *)android_media_SoundPool_load_FD
},
{ "unload",
"(I)Z",
- (void *)android_media_SoundPool_SoundPoolImpl_unload
+ (void *)android_media_SoundPool_unload
},
{ "_play",
"(IFFIIF)I",
- (void *)android_media_SoundPool_SoundPoolImpl_play
+ (void *)android_media_SoundPool_play
},
{ "pause",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_pause
+ (void *)android_media_SoundPool_pause
},
{ "resume",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_resume
+ (void *)android_media_SoundPool_resume
},
{ "autoPause",
"()V",
- (void *)android_media_SoundPool_SoundPoolImpl_autoPause
+ (void *)android_media_SoundPool_autoPause
},
{ "autoResume",
"()V",
- (void *)android_media_SoundPool_SoundPoolImpl_autoResume
+ (void *)android_media_SoundPool_autoResume
},
{ "stop",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_stop
+ (void *)android_media_SoundPool_stop
},
{ "_setVolume",
"(IFF)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setVolume
+ (void *)android_media_SoundPool_setVolume
},
{ "setPriority",
"(II)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setPriority
+ (void *)android_media_SoundPool_setPriority
},
{ "setLoop",
"(II)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setLoop
+ (void *)android_media_SoundPool_setLoop
},
{ "setRate",
"(IF)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setRate
+ (void *)android_media_SoundPool_setRate
},
{ "native_setup",
"(Ljava/lang/Object;ILjava/lang/Object;)I",
- (void*)android_media_SoundPool_SoundPoolImpl_native_setup
+ (void*)android_media_SoundPool_native_setup
},
{ "release",
"()V",
- (void*)android_media_SoundPool_SoundPoolImpl_release
+ (void*)android_media_SoundPool_release
}
};
-static const char* const kClassPathName = "android/media/SoundPool$SoundPoolImpl";
+static const char* const kClassPathName = "android/media/SoundPool";
jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
{
@@ -314,14 +314,14 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
fields.mNativeContext = env->GetFieldID(clazz, "mNativeContext", "J");
if (fields.mNativeContext == NULL) {
- ALOGE("Can't find SoundPoolImpl.mNativeContext");
+ ALOGE("Can't find SoundPool.mNativeContext");
return result;
}
fields.mPostEvent = env->GetStaticMethodID(clazz, "postEventFromNative",
"(Ljava/lang/Object;IIILjava/lang/Object;)V");
if (fields.mPostEvent == NULL) {
- ALOGE("Can't find android/media/SoundPoolImpl.postEventFromNative");
+ ALOGE("Can't find android/media/SoundPool.postEventFromNative");
return result;
}
diff --git a/media/packages/BluetoothMidiService/Android.mk b/media/packages/BluetoothMidiService/Android.mk
new file mode 100644
index 0000000..2c9c3c5
--- /dev/null
+++ b/media/packages/BluetoothMidiService/Android.mk
@@ -0,0 +1,11 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := $(call all-subdir-java-files)
+
+LOCAL_PACKAGE_NAME := BluetoothMidiService
+LOCAL_CERTIFICATE := platform
+
+include $(BUILD_PACKAGE)
diff --git a/media/packages/BluetoothMidiService/AndroidManifest.xml b/media/packages/BluetoothMidiService/AndroidManifest.xml
new file mode 100644
index 0000000..15aa581
--- /dev/null
+++ b/media/packages/BluetoothMidiService/AndroidManifest.xml
@@ -0,0 +1,17 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.android.bluetoothmidiservice"
+ >
+
+ <uses-feature android:name="android.hardware.bluetooth_le" android:required="true"/>
+ <uses-feature android:name="android.software.midi" android:required="true"/>
+ <uses-permission android:name="android.permission.BLUETOOTH"/>
+
+ <application
+ android:label="@string/app_name">
+ <service android:name="BluetoothMidiService">
+ <intent-filter>
+ <action android:name="android.media.midi.BluetoothMidiService" />
+ </intent-filter>
+ </service>
+ </application>
+</manifest>
diff --git a/media/packages/BluetoothMidiService/res/values/strings.xml b/media/packages/BluetoothMidiService/res/values/strings.xml
new file mode 100644
index 0000000..c98e56c
--- /dev/null
+++ b/media/packages/BluetoothMidiService/res/values/strings.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <string name="app_name">Bluetooth MIDI Service</string>
+</resources>
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java
new file mode 100644
index 0000000..8d194e5
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiDevice.java
@@ -0,0 +1,276 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothGatt;
+import android.bluetooth.BluetoothGattCallback;
+import android.bluetooth.BluetoothGattCharacteristic;
+import android.bluetooth.BluetoothGattDescriptor;
+import android.bluetooth.BluetoothGattService;
+import android.bluetooth.BluetoothProfile;
+import android.content.Context;
+import android.media.midi.MidiReceiver;
+import android.media.midi.MidiManager;
+import android.media.midi.MidiDeviceServer;
+import android.media.midi.MidiDeviceInfo;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.util.Log;
+
+import com.android.internal.midi.MidiEventScheduler;
+import com.android.internal.midi.MidiEventScheduler.MidiEvent;
+
+import libcore.io.IoUtils;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Class used to implement a Bluetooth MIDI device.
+ */
+public final class BluetoothMidiDevice {
+
+ private static final String TAG = "BluetoothMidiDevice";
+
+ private static final int MAX_PACKET_SIZE = 20;
+
+ // Bluetooth MIDI Gatt service UUID
+ private static final UUID MIDI_SERVICE = UUID.fromString(
+ "03B80E5A-EDE8-4B33-A751-6CE34EC4C700");
+ // Bluetooth MIDI Gatt characteristic UUID
+ private static final UUID MIDI_CHARACTERISTIC = UUID.fromString(
+ "7772E5DB-3868-4112-A1A9-F2669D106BF3");
+ // Descriptor UUID for enabling characteristic changed notifications
+ private static final UUID CLIENT_CHARACTERISTIC_CONFIG = UUID.fromString(
+ "00002902-0000-1000-8000-00805f9b34fb");
+
+ private final BluetoothDevice mBluetoothDevice;
+ private final BluetoothMidiService mService;
+ private final MidiManager mMidiManager;
+ private MidiReceiver mOutputReceiver;
+ private final MidiEventScheduler mEventScheduler = new MidiEventScheduler();
+
+ private MidiDeviceServer mDeviceServer;
+ private BluetoothGatt mBluetoothGatt;
+
+ private BluetoothGattCharacteristic mCharacteristic;
+
+ // PacketReceiver for receiving formatted packets from our BluetoothPacketEncoder
+ private final PacketReceiver mPacketReceiver = new PacketReceiver();
+
+ private final BluetoothPacketEncoder mPacketEncoder
+ = new BluetoothPacketEncoder(mPacketReceiver, MAX_PACKET_SIZE);
+
+ private final BluetoothPacketDecoder mPacketDecoder
+ = new BluetoothPacketDecoder(MAX_PACKET_SIZE);
+
+ private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
+ @Override
+ public void onConnectionStateChange(BluetoothGatt gatt, int status,
+ int newState) {
+ String intentAction;
+ if (newState == BluetoothProfile.STATE_CONNECTED) {
+ Log.i(TAG, "Connected to GATT server.");
+ Log.i(TAG, "Attempting to start service discovery:" +
+ mBluetoothGatt.discoverServices());
+ } else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
+ Log.i(TAG, "Disconnected from GATT server.");
+ // FIXME synchronize?
+ close();
+ }
+ }
+
+ @Override
+ public void onServicesDiscovered(BluetoothGatt gatt, int status) {
+ if (status == BluetoothGatt.GATT_SUCCESS) {
+ List<BluetoothGattService> services = mBluetoothGatt.getServices();
+ for (BluetoothGattService service : services) {
+ if (MIDI_SERVICE.equals(service.getUuid())) {
+ Log.d(TAG, "found MIDI_SERVICE");
+ List<BluetoothGattCharacteristic> characteristics
+ = service.getCharacteristics();
+ for (BluetoothGattCharacteristic characteristic : characteristics) {
+ if (MIDI_CHARACTERISTIC.equals(characteristic.getUuid())) {
+ Log.d(TAG, "found MIDI_CHARACTERISTIC");
+ mCharacteristic = characteristic;
+
+ // Specification says to read the characteristic first and then
+ // switch to receiving notifications
+ mBluetoothGatt.readCharacteristic(characteristic);
+ break;
+ }
+ }
+ break;
+ }
+ }
+ } else {
+ Log.w(TAG, "onServicesDiscovered received: " + status);
+ // FIXME - report error back to client?
+ }
+ }
+
+ @Override
+ public void onCharacteristicRead(BluetoothGatt gatt,
+ BluetoothGattCharacteristic characteristic,
+ int status) {
+ Log.d(TAG, "onCharacteristicRead " + status);
+
+ // switch to receiving notifications after initial characteristic read
+ mBluetoothGatt.setCharacteristicNotification(characteristic, true);
+
+ BluetoothGattDescriptor descriptor = characteristic.getDescriptor(
+ CLIENT_CHARACTERISTIC_CONFIG);
+ // FIXME null check
+ descriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
+ mBluetoothGatt.writeDescriptor(descriptor);
+ }
+
+ @Override
+ public void onCharacteristicWrite(BluetoothGatt gatt,
+ BluetoothGattCharacteristic characteristic,
+ int status) {
+ Log.d(TAG, "onCharacteristicWrite " + status);
+ mPacketEncoder.writeComplete();
+ }
+
+ @Override
+ public void onCharacteristicChanged(BluetoothGatt gatt,
+ BluetoothGattCharacteristic characteristic) {
+// logByteArray("Received ", characteristic.getValue(), 0,
+// characteristic.getValue().length);
+ mPacketDecoder.decodePacket(characteristic.getValue(), mOutputReceiver);
+ }
+ };
+
+ // This receives MIDI data that has already been passed through our MidiEventScheduler
+ // and has been normalized by our MidiFramer.
+
+ private class PacketReceiver implements PacketEncoder.PacketReceiver {
+ // buffers of every possible packet size
+ private final byte[][] mWriteBuffers;
+
+ public PacketReceiver() {
+ // Create buffers of every possible packet size
+ mWriteBuffers = new byte[MAX_PACKET_SIZE + 1][];
+ for (int i = 0; i <= MAX_PACKET_SIZE; i++) {
+ mWriteBuffers[i] = new byte[i];
+ }
+ }
+
+ @Override
+ public void writePacket(byte[] buffer, int count) {
+ if (mCharacteristic == null) {
+ Log.w(TAG, "not ready to send packet yet");
+ return;
+ }
+ byte[] writeBuffer = mWriteBuffers[count];
+ System.arraycopy(buffer, 0, writeBuffer, 0, count);
+ mCharacteristic.setValue(writeBuffer);
+// logByteArray("Sent ", mCharacteristic.getValue(), 0,
+// mCharacteristic.getValue().length);
+ mBluetoothGatt.writeCharacteristic(mCharacteristic);
+ }
+ }
+
+ public BluetoothMidiDevice(Context context, BluetoothDevice device,
+ BluetoothMidiService service) {
+ mBluetoothDevice = device;
+ mService = service;
+
+ mBluetoothGatt = mBluetoothDevice.connectGatt(context, false, mGattCallback);
+
+ mMidiManager = (MidiManager)context.getSystemService(Context.MIDI_SERVICE);
+
+ Bundle properties = new Bundle();
+ properties.putString(MidiDeviceInfo.PROPERTY_NAME, mBluetoothGatt.getDevice().getName());
+ properties.putParcelable(MidiDeviceInfo.PROPERTY_BLUETOOTH_DEVICE,
+ mBluetoothGatt.getDevice());
+
+ MidiReceiver[] inputPortReceivers = new MidiReceiver[1];
+ inputPortReceivers[0] = mEventScheduler.getReceiver();
+
+ mDeviceServer = mMidiManager.createDeviceServer(inputPortReceivers, 1,
+ null, null, properties, MidiDeviceInfo.TYPE_BLUETOOTH, null);
+
+ mOutputReceiver = mDeviceServer.getOutputPortReceivers()[0];
+
+ // This thread waits for outgoing messages from our MidiEventScheduler
+ // And forwards them to our MidiFramer to be prepared to send via Bluetooth.
+ new Thread("BluetoothMidiDevice " + mBluetoothDevice) {
+ @Override
+ public void run() {
+ while (true) {
+ MidiEvent event;
+ try {
+ event = (MidiEvent)mEventScheduler.waitNextEvent();
+ } catch (InterruptedException e) {
+ // try again
+ continue;
+ }
+ if (event == null) {
+ break;
+ }
+ try {
+ mPacketEncoder.sendWithTimestamp(event.data, 0, event.count,
+ event.getTimestamp());
+ } catch (IOException e) {
+ Log.e(TAG, "mPacketAccumulator.sendWithTimestamp failed", e);
+ }
+ mEventScheduler.addEventToPool(event);
+ }
+ Log.d(TAG, "BluetoothMidiDevice thread exit");
+ }
+ }.start();
+ }
+
+ void close() {
+ mEventScheduler.close();
+ if (mDeviceServer != null) {
+ IoUtils.closeQuietly(mDeviceServer);
+ mDeviceServer = null;
+ mService.deviceClosed(mBluetoothDevice);
+ }
+ if (mBluetoothGatt != null) {
+ mBluetoothGatt.close();
+ mBluetoothGatt = null;
+ }
+ }
+
+ public IBinder getBinder() {
+ return mDeviceServer.asBinder();
+ }
+
+ private static void logByteArray(String prefix, byte[] value, int offset, int count) {
+ StringBuilder builder = new StringBuilder(prefix);
+ for (int i = offset; i < count; i++) {
+ String hex = Integer.toHexString(value[i]);
+ int length = hex.length();
+ if (length == 1) {
+ hex = "0x" + hex;
+ } else {
+ hex = hex.substring(length - 2, length);
+ }
+ builder.append(hex);
+ if (i != value.length - 1) {
+ builder.append(", ");
+ }
+ }
+ Log.d(TAG, builder.toString());
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java
new file mode 100644
index 0000000..fbde2b4
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothMidiService.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.app.Service;
+import android.bluetooth.BluetoothDevice;
+import android.content.Intent;
+import android.media.midi.MidiManager;
+import android.os.IBinder;
+import android.util.Log;
+
+import java.util.HashMap;
+
+public class BluetoothMidiService extends Service {
+ private static final String TAG = "BluetoothMidiService";
+
+ // BluetoothMidiDevices keyed by BluetoothDevice
+ private final HashMap<BluetoothDevice,BluetoothMidiDevice> mDeviceServerMap
+ = new HashMap<BluetoothDevice,BluetoothMidiDevice>();
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ if (MidiManager.BLUETOOTH_MIDI_SERVICE_INTENT.equals(intent.getAction())) {
+ BluetoothDevice bluetoothDevice = (BluetoothDevice)intent.getParcelableExtra("device");
+ if (bluetoothDevice == null) {
+ Log.e(TAG, "no BluetoothDevice in onBind intent");
+ return null;
+ }
+
+ BluetoothMidiDevice device;
+ synchronized (mDeviceServerMap) {
+ device = mDeviceServerMap.get(bluetoothDevice);
+ if (device == null) {
+ device = new BluetoothMidiDevice(this, bluetoothDevice, this);
+ }
+ }
+ return device.getBinder();
+ }
+ return null;
+ }
+
+ void deviceClosed(BluetoothDevice device) {
+ synchronized (mDeviceServerMap) {
+ mDeviceServerMap.remove(device);
+ }
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java
new file mode 100644
index 0000000..1bce9fb
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketDecoder.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+import android.util.Log;
+
+import java.io.IOException;
+
+/**
+ * This is an abstract base class that decodes a packet buffer and passes it to a
+ * {@link android.media.midi.MidiReceiver}
+ */
+public class BluetoothPacketDecoder extends PacketDecoder {
+
+ private static final String TAG = "BluetoothPacketDecoder";
+
+ private final byte[] mBuffer;
+ private MidiBtleTimeTracker mTimeTracker;
+
+ private final int TIMESTAMP_MASK_HIGH = 0x1F80;
+ private final int TIMESTAMP_MASK_LOW = 0x7F;
+ private final int HEADER_TIMESTAMP_MASK = 0x3F;
+
+ public BluetoothPacketDecoder(int maxPacketSize) {
+ mBuffer = new byte[maxPacketSize];
+ }
+
+ @Override
+ public void decodePacket(byte[] buffer, MidiReceiver receiver) {
+ if (mTimeTracker == null) {
+ mTimeTracker = new MidiBtleTimeTracker(System.nanoTime());
+ }
+
+ int length = buffer.length;
+
+ // NOTE his code allows running status across packets,
+ // although the specification does not allow that.
+
+ if (length < 1) {
+ Log.e(TAG, "empty packet");
+ return;
+ }
+ byte header = buffer[0];
+ if ((header & 0xC0) != 0x80) {
+ Log.e(TAG, "packet does not start with header");
+ return;
+ }
+
+ // shift bits 0 - 5 to bits 7 - 12
+ int highTimestamp = (header & HEADER_TIMESTAMP_MASK) << 7;
+ boolean lastWasTimestamp = false;
+ int dataCount = 0;
+ int previousLowTimestamp = 0;
+ long nanoTimestamp = 0;
+ int currentTimestamp = 0;
+
+ // iterate through the rest of the packet, separating MIDI data from timestamps
+ for (int i = 1; i < buffer.length; i++) {
+ byte b = buffer[i];
+
+ if ((b & 0x80) != 0 && !lastWasTimestamp) {
+ lastWasTimestamp = true;
+ int lowTimestamp = b & TIMESTAMP_MASK_LOW;
+ if (lowTimestamp < previousLowTimestamp) {
+ highTimestamp = (highTimestamp + 0x0080) & TIMESTAMP_MASK_HIGH;
+ }
+ previousLowTimestamp = lowTimestamp;
+
+ int newTimestamp = highTimestamp | lowTimestamp;
+ if (newTimestamp != currentTimestamp) {
+ if (dataCount > 0) {
+ // send previous message separately since it has a different timestamp
+ try {
+ receiver.sendWithTimestamp(mBuffer, 0, dataCount, nanoTimestamp);
+ } catch (IOException e) {
+ // ???
+ }
+ dataCount = 0;
+ }
+ currentTimestamp = newTimestamp;
+ }
+
+ // calculate nanoTimestamp
+ long now = System.nanoTime();
+ nanoTimestamp = mTimeTracker.convertTimestampToNanotime(currentTimestamp, now);
+ } else {
+ lastWasTimestamp = false;
+ mBuffer[dataCount++] = b;
+ }
+ }
+
+ if (dataCount > 0) {
+ try {
+ receiver.sendWithTimestamp(mBuffer, 0, dataCount, nanoTimestamp);
+ } catch (IOException e) {
+ // ???
+ }
+ }
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java
new file mode 100644
index 0000000..463edcf
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/BluetoothPacketEncoder.java
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+
+import com.android.internal.midi.MidiConstants;
+import com.android.internal.midi.MidiFramer;
+
+import java.io.IOException;
+
+/**
+ * This class accumulates MIDI messages to form a MIDI packet.
+ *
+ * Messages arriving via {@link #onReceive} are first normalized by a
+ * {@link MidiFramer}, then packed into a BLE-MIDI packet: one header byte
+ * (timestamp bits 7 - 12), then per message a timestamp byte (bits 0 - 6)
+ * followed by the message bytes, with the status byte omitted when running
+ * status applies. Completed packets are handed to the {@link PacketReceiver};
+ * the client calls {@link #writeComplete} when the write has finished.
+ */
+public class BluetoothPacketEncoder extends PacketEncoder {
+
+ private static final String TAG = "BluetoothPacketEncoder";
+
+ private static final long MILLISECOND_NANOS = 1000000L;
+
+ // mask for generating 13 bit timestamps
+ private static final int MILLISECOND_MASK = 0x1FFF;
+
+ private final PacketReceiver mPacketReceiver;
+
+ // buffer for accumulating messages to write
+ private final byte[] mAccumulationBuffer;
+ // number of bytes currently in mAccumulationBuffer
+ private int mAccumulatedBytes;
+ // timestamp for first message in current packet
+ private int mPacketTimestamp;
+ // current running status, or zero if none
+ private int mRunningStatus;
+
+ // true while a packet handed to mPacketReceiver has not yet been
+ // confirmed via writeComplete()
+ private boolean mWritePending;
+
+ private final Object mLock = new Object();
+
+ // This receives normalized data from mMidiFramer and accumulates it into a packet buffer
+ private final MidiReceiver mFramedDataReceiver = new MidiReceiver() {
+ @Override
+ public void onReceive(byte[] msg, int offset, int count, long timestamp)
+ throws IOException {
+
+ // 13-bit millisecond timestamp as required by BLE-MIDI
+ int milliTimestamp = (int)(timestamp / MILLISECOND_NANOS) & MILLISECOND_MASK;
+ // NOTE(review): 'offset' is ignored below (msg[0] and arraycopy from
+ // index 1); this assumes MidiFramer always delivers at offset 0 - confirm.
+ int status = msg[0] & 0xFF;
+
+ synchronized (mLock) {
+ boolean needsTimestamp = (milliTimestamp != mPacketTimestamp);
+ int bytesNeeded = count;
+ if (needsTimestamp) bytesNeeded++; // add one for timestamp byte
+ if (status == mRunningStatus) bytesNeeded--; // subtract one for status byte
+
+ if (mAccumulatedBytes + bytesNeeded > mAccumulationBuffer.length) {
+ // write out our data if there is no more room
+ // if necessary, block until previous packet is sent
+ flushLocked(true);
+ }
+
+ // write header if we are starting a new packet
+ if (mAccumulatedBytes == 0) {
+ // header byte with timestamp bits 7 - 12
+ mAccumulationBuffer[mAccumulatedBytes++] = (byte)(0x80 | (milliTimestamp >> 7));
+ mPacketTimestamp = milliTimestamp;
+ needsTimestamp = true;
+ }
+
+ // write new timestamp byte and status byte if necessary
+ if (needsTimestamp) {
+ // timestamp byte with bits 0 - 6 of timestamp
+ mAccumulationBuffer[mAccumulatedBytes++] =
+ (byte)(0x80 | (milliTimestamp & 0x7F));
+ mPacketTimestamp = milliTimestamp;
+ }
+
+ if (status != mRunningStatus) {
+ mAccumulationBuffer[mAccumulatedBytes++] = (byte)status;
+ if (MidiConstants.allowRunningStatus(status)) {
+ // channel voice message: becomes the new running status
+ mRunningStatus = status;
+ } else if (MidiConstants.cancelsRunningStatus(status)) {
+ // FIX: the original tested allowRunningStatus() in both
+ // branches, leaving this branch dead so running status was
+ // never cancelled. System Common / System Exclusive must
+ // cancel running status; Real-Time messages leave it alone.
+ mRunningStatus = 0;
+ }
+ }
+
+ // now copy data bytes
+ int dataLength = count - 1;
+ System.arraycopy(msg, 1, mAccumulationBuffer, mAccumulatedBytes, dataLength);
+ // FIXME - handle long SysEx properly
+ mAccumulatedBytes += dataLength;
+
+ // write the packet if possible, but do not block
+ flushLocked(false);
+ }
+ }
+ };
+
+ // MidiFramer for normalizing incoming data
+ private final MidiFramer mMidiFramer = new MidiFramer(mFramedDataReceiver);
+
+ /**
+ * @param packetReceiver receives completed packets for writing
+ * @param maxPacketSize maximum number of bytes in one packet
+ */
+ public BluetoothPacketEncoder(PacketReceiver packetReceiver, int maxPacketSize) {
+ mPacketReceiver = packetReceiver;
+ mAccumulationBuffer = new byte[maxPacketSize];
+ }
+
+ @Override
+ public void onReceive(byte[] msg, int offset, int count, long timestamp)
+ throws IOException {
+ // normalize the data by passing it through a MidiFramer first
+ mMidiFramer.sendWithTimestamp(msg, offset, count, timestamp);
+ }
+
+ @Override
+ public void writeComplete() {
+ synchronized (mLock) {
+ mWritePending = false;
+ // flush any data that accumulated while the write was in flight
+ flushLocked(false);
+ mLock.notify();
+ }
+ }
+
+ // Sends the accumulated packet to mPacketReceiver if any data is pending.
+ // When canBlock is true, waits for an outstanding write to complete first;
+ // otherwise returns immediately while a write is still pending.
+ // Caller must hold mLock.
+ private void flushLocked(boolean canBlock) {
+ if (mWritePending && !canBlock) {
+ return;
+ }
+
+ while (mWritePending && mAccumulatedBytes > 0) {
+ try {
+ mLock.wait();
+ } catch (InterruptedException e) {
+ // try again
+ continue;
+ }
+ }
+
+ if (mAccumulatedBytes > 0) {
+ mPacketReceiver.writePacket(mAccumulationBuffer, mAccumulatedBytes);
+ mAccumulatedBytes = 0;
+ mPacketTimestamp = 0;
+ mRunningStatus = 0;
+ mWritePending = true;
+ }
+ }
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java
new file mode 100644
index 0000000..5202f9a
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/MidiBtleTimeTracker.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+/**
+ * Convert MIDI over BTLE timestamps to system nanotime.
+ *
+ * The peripheral sends 13-bit millisecond timestamps that wrap every 8192 ms.
+ * This class tracks wraps to maintain a model of the peripheral's clock, maps
+ * it onto the host's nanosecond clock, clamps the result into a plausibility
+ * window, and guarantees returned times never run backwards.
+ * NOTE(review): appears to assume single-threaded use - confirm with callers.
+ */
+public class MidiBtleTimeTracker {
+
+ public final static long NANOS_PER_MILLI = 1000000L;
+
+ private final static long RANGE_MILLIS = 0x2000; // per MIDI / BTLE standard
+ private final static long RANGE_NANOS = RANGE_MILLIS * NANOS_PER_MILLI;
+
+ private int mWindowMillis = 20; // typical max connection interval
+ private long mWindowNanos = mWindowMillis * NANOS_PER_MILLI;
+
+ private int mPreviousTimestamp; // Used to calculate deltas.
+ private long mPreviousNow;
+ // Our model of the peripherals millisecond clock.
+ private long mPeripheralTimeMillis;
+ // Host time that corresponds to time=0 on the peripheral.
+ private long mBaseHostTimeNanos;
+ private long mPreviousResult; // To prevent retrograde timestamp
+
+ /**
+ * @param now current host time in nanoseconds (e.g. System.nanoTime())
+ */
+ public MidiBtleTimeTracker(long now) {
+ mPeripheralTimeMillis = 0;
+ mBaseHostTimeNanos = now;
+ mPreviousNow = now;
+ }
+
+ /**
+ * @param timestamp
+ * 13-bit millis in range of 0 to 8191
+ * @param now
+ * current time in nanoseconds
+ * @return nanoseconds corresponding to the timestamp
+ */
+ public long convertTimestampToNanotime(int timestamp, long now) {
+ long deltaMillis = timestamp - mPreviousTimestamp;
+ // will be negative when timestamp wraps
+ if (deltaMillis < 0) {
+ deltaMillis += RANGE_MILLIS;
+ }
+ mPeripheralTimeMillis += deltaMillis;
+
+ // If we have not been called for a long time then
+ // make sure we have not wrapped multiple times.
+ if ((now - mPreviousNow) > (RANGE_NANOS / 2)) {
+ // Handle missed wraps.
+ long minimumTimeNanos = (now - mBaseHostTimeNanos)
+ - (RANGE_NANOS / 2);
+ long minimumTimeMillis = minimumTimeNanos / NANOS_PER_MILLI;
+ while (mPeripheralTimeMillis < minimumTimeMillis) {
+ mPeripheralTimeMillis += RANGE_MILLIS;
+ }
+ }
+
+ // Convert peripheral time millis to host time nanos.
+ long timestampHostNanos = (mPeripheralTimeMillis * NANOS_PER_MILLI)
+ + mBaseHostTimeNanos;
+
+ // The event cannot be in the future. So move window if we hit that.
+ if (timestampHostNanos > now) {
+ mPeripheralTimeMillis = 0;
+ mBaseHostTimeNanos = now;
+ timestampHostNanos = now;
+ } else {
+ // Timestamp should not be older than our window time.
+ long windowBottom = now - mWindowNanos;
+ if (timestampHostNanos < windowBottom) {
+ mPeripheralTimeMillis = 0;
+ mBaseHostTimeNanos = windowBottom;
+ timestampHostNanos = windowBottom;
+ }
+ }
+ // prevent retrograde timestamp
+ if (timestampHostNanos < mPreviousResult) {
+ timestampHostNanos = mPreviousResult;
+ }
+ mPreviousResult = timestampHostNanos;
+ mPreviousTimestamp = timestamp;
+ mPreviousNow = now;
+ return timestampHostNanos;
+ }
+
+ public int getWindowMillis() {
+ return mWindowMillis;
+ }
+
+ public void setWindowMillis(int window) {
+ this.mWindowMillis = window;
+ // FIX: keep the nanosecond window in sync; the original left
+ // mWindowNanos stale, so changing the window had no effect on
+ // the clamping logic in convertTimestampToNanotime().
+ this.mWindowNanos = window * NANOS_PER_MILLI;
+ }
+
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java
new file mode 100644
index 0000000..da4b63a
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketDecoder.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+
+/**
+ * This is an abstract base class that decodes a packet buffer and passes it to a
+ * {@link android.media.midi.MidiReceiver}
+ */
+public abstract class PacketDecoder {
+
+ /**
+ * Decodes MIDI data in a packet and passes it to a {@link android.media.midi.MidiReceiver}
+ * NOTE(review): implementations appear to decode one complete BLE-MIDI
+ * packet per call - confirm against callers.
+ * @param buffer the packet to decode
+ * @param receiver the {@link android.media.midi.MidiReceiver} to receive the decoded MIDI data
+ */
+ abstract public void decodePacket(byte[] buffer, MidiReceiver receiver);
+}
diff --git a/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java
new file mode 100644
index 0000000..12c8b9b
--- /dev/null
+++ b/media/packages/BluetoothMidiService/src/com/android/bluetoothmidiservice/PacketEncoder.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bluetoothmidiservice;
+
+import android.media.midi.MidiReceiver;
+
+/**
+ * This is an abstract base class that encodes MIDI data into a packet buffer.
+ * PacketEncoder receives data via its {@link android.media.midi.MidiReceiver#onReceive} method
+ * and notifies its client of packets to write via the {@link PacketEncoder.PacketReceiver}
+ * interface.
+ */
+public abstract class PacketEncoder extends MidiReceiver {
+
+ public interface PacketReceiver {
+ /** Called to write an accumulated packet.
+ * The client signals completion of the write by calling
+ * {@link PacketEncoder#writeComplete} on the encoder.
+ * @param buffer the packet buffer to write
+ * @param count the number of bytes in the packet buffer to write
+ */
+ public void writePacket(byte[] buffer, int count);
+ }
+
+ /**
+ * Called to inform PacketEncoder when the previous write is complete.
+ */
+ abstract public void writeComplete();
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java
index d7069cac..e730329 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/mediarecorder/MediaRecorderTest.java
@@ -27,6 +27,7 @@ import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.hardware.Camera;
+import android.media.MediaMetadataRetriever;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.media.EncoderCapabilities;
@@ -426,6 +427,29 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
return validVideo;
}
+ // Verifies that the capture-framerate metadata of the recorded clip
+ // matches the requested capture rate. Returns true on success.
+ private boolean validateMetadata(String filePath, int captureRate) {
+ MediaMetadataRetriever retriever = new MediaMetadataRetriever();
+ try {
+ retriever.setDataSource(filePath);
+
+ // verify capture rate meta key is present and correct
+ String captureFps = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE);
+
+ if (captureFps == null) {
+ Log.d(TAG, "METADATA_KEY_CAPTURE_FRAMERATE is missing");
+ return false;
+ }
+
+ if (Math.abs(Float.parseFloat(captureFps) - captureRate) > 0.001) {
+ Log.d(TAG, "METADATA_KEY_CAPTURE_FRAMERATE is incorrect: "
+ + captureFps + " vs. " + captureRate);
+ return false;
+ }
+
+ // verify other meta keys here if necessary
+ return true;
+ } finally {
+ // FIX: release the retriever; the original leaked its native resources
+ retriever.release();
+ }
+ }
@LargeTest
/*
* This test case set the camera in portrait mode.
@@ -555,13 +579,16 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
// always set videoOnly=false, MediaRecorder should disable
// audio automatically with time lapse/slow motion
- success = recordVideoFromSurface(frameRate,
- k==0 ? MIN_VIDEO_FPS : HIGH_SPEED_FPS,
- 352, 288, codec,
+ int captureRate = k==0 ? MIN_VIDEO_FPS : HIGH_SPEED_FPS;
+ success = recordVideoFromSurface(
+ frameRate, captureRate, 352, 288, codec,
MediaRecorder.OutputFormat.THREE_GPP,
filename, false /* videoOnly */);
if (success) {
success = validateVideo(filename, 352, 288);
+ if (success) {
+ success = validateMetadata(filename, captureRate);
+ }
}
if (!success) {
noOfFailure++;
@@ -569,6 +596,7 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase2<MediaFra
}
} catch (Exception e) {
Log.v(TAG, e.toString());
+ noOfFailure++;
}
assertTrue("testSurfaceRecordingTimeLapse", noOfFailure == 0);
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
index 362bbc4..14c2619 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
@@ -20,8 +20,6 @@ import android.hardware.CameraInfo;
import android.hardware.ICamera;
import android.hardware.ICameraClient;
import android.hardware.ICameraServiceListener;
-import android.hardware.IProCameraCallbacks;
-import android.hardware.IProCameraUser;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
import android.hardware.camera2.impl.CameraMetadataNative;
@@ -181,30 +179,6 @@ public class CameraBinderTest extends AndroidTestCase {
}
}
- static class DummyProCameraCallbacks extends DummyBase implements IProCameraCallbacks {
- }
-
- @SmallTest
- public void testConnectPro() throws Exception {
- for (int cameraId = 0; cameraId < mUtils.getGuessedNumCameras(); ++cameraId) {
-
- IProCameraCallbacks dummyCallbacks = new DummyProCameraCallbacks();
-
- String clientPackageName = getContext().getPackageName();
-
- BinderHolder holder = new BinderHolder();
- CameraBinderDecorator.newInstance(mUtils.getCameraService())
- .connectPro(dummyCallbacks, cameraId,
- clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
- IProCameraUser cameraUser = IProCameraUser.Stub.asInterface(holder.getBinder());
- assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
-
- Log.v(TAG, String.format("Camera %s connected", cameraId));
-
- cameraUser.disconnect();
- }
- }
-
@SmallTest
public void testConnectLegacy() throws Exception {
final int CAMERA_HAL_API_VERSION_1_0 = 0x100;
@@ -287,6 +261,16 @@ public class CameraBinderTest extends AndroidTestCase {
// TODO Auto-generated method stub
}
+
+ /*
+ * (non-Javadoc)
+ * @see android.hardware.camera2.ICameraDeviceCallbacks#onPrepared()
+ */
+ @Override
+ public void onPrepared(int streamId) throws RemoteException {
+ // TODO Auto-generated method stub
+
+ }
}
@SmallTest
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
index d756d05..d71b44b 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
@@ -19,12 +19,14 @@ package com.android.mediaframeworktest.integration;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.utils.BinderHolder;
import android.media.Image;
import android.media.ImageReader;
@@ -67,6 +69,7 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
private CameraBinderTestUtils mUtils;
private ICameraDeviceCallbacks.Stub mMockCb;
private Surface mSurface;
+ private OutputConfiguration mOutputConfiguration;
private HandlerThread mHandlerThread;
private Handler mHandler;
ImageReader mImageReader;
@@ -130,6 +133,16 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
// TODO Auto-generated method stub
}
+
+ /*
+ * (non-Javadoc)
+ * @see android.hardware.camera2.ICameraDeviceCallbacks#onPrepared()
+ */
+ @Override
+ public void onPrepared(int streamId) throws RemoteException {
+ // TODO Auto-generated method stub
+
+ }
}
class IsMetadataNotEmpty extends ArgumentMatcher<CameraMetadataNative> {
@@ -147,6 +160,7 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
MAX_NUM_IMAGES);
mImageReader.setOnImageAvailableListener(new ImageDropperListener(), mHandler);
mSurface = mImageReader.getSurface();
+ mOutputConfiguration = new OutputConfiguration(mSurface);
}
private CaptureRequest.Builder createDefaultBuilder(boolean needStream) throws Exception {
@@ -157,11 +171,12 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
assertEquals(CameraBinderTestUtils.NO_ERROR, status);
assertFalse(metadata.isEmpty());
- CaptureRequest.Builder request = new CaptureRequest.Builder(metadata);
+ CaptureRequest.Builder request = new CaptureRequest.Builder(metadata, /*reprocess*/false,
+ CameraCaptureSession.SESSION_ID_NONE);
assertFalse(request.isEmpty());
assertFalse(metadata.isEmpty());
if (needStream) {
- int streamId = mCameraUser.createStream(mSurface);
+ int streamId = mCameraUser.createStream(mOutputConfiguration);
assertEquals(0, streamId);
request.addTarget(mSurface);
}
@@ -234,11 +249,11 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
@SmallTest
public void testCreateStream() throws Exception {
- int streamId = mCameraUser.createStream(mSurface);
+ int streamId = mCameraUser.createStream(mOutputConfiguration);
assertEquals(0, streamId);
assertEquals(CameraBinderTestUtils.ALREADY_EXISTS,
- mCameraUser.createStream(mSurface));
+ mCameraUser.createStream(mOutputConfiguration));
assertEquals(CameraBinderTestUtils.NO_ERROR, mCameraUser.deleteStream(streamId));
}
@@ -255,18 +270,19 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
public void testCreateStreamTwo() throws Exception {
// Create first stream
- int streamId = mCameraUser.createStream(mSurface);
+ int streamId = mCameraUser.createStream(mOutputConfiguration);
assertEquals(0, streamId);
assertEquals(CameraBinderTestUtils.ALREADY_EXISTS,
- mCameraUser.createStream(mSurface));
+ mCameraUser.createStream(mOutputConfiguration));
// Create second stream with a different surface.
SurfaceTexture surfaceTexture = new SurfaceTexture(/* ignored */0);
surfaceTexture.setDefaultBufferSize(640, 480);
Surface surface2 = new Surface(surfaceTexture);
+ OutputConfiguration output2 = new OutputConfiguration(surface2);
- int streamId2 = mCameraUser.createStream(surface2);
+ int streamId2 = mCameraUser.createStream(output2);
assertEquals(1, streamId2);
// Clean up streams