summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
Diffstat (limited to 'media')
-rw-r--r--media/java/android/media/AudioManager.java42
-rw-r--r--media/java/android/media/AudioService.java200
-rw-r--r--media/java/android/media/AudioSystem.java10
-rw-r--r--media/java/android/media/IAudioService.aidl6
-rw-r--r--media/java/android/media/ImageReader.java29
-rw-r--r--media/java/android/media/MediaCodec.java46
-rw-r--r--media/java/android/media/MediaCodecInfo.java21
-rw-r--r--media/java/android/media/MediaFormat.java50
-rw-r--r--media/java/android/media/MediaPlayer.java71
-rw-r--r--media/java/android/media/MediaRecorder.java23
-rw-r--r--media/java/android/media/SubtitleData.java88
-rw-r--r--media/jni/android_media_ImageReader.cpp19
-rw-r--r--media/jni/android_media_MediaCodec.cpp28
-rw-r--r--media/jni/android_media_MediaCodec.h2
-rw-r--r--media/jni/android_media_MediaCodecList.cpp8
-rw-r--r--media/jni/android_media_MediaDrm.cpp16
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java26
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java15
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java98
19 files changed, 612 insertions, 186 deletions
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 14cdbb7..be83581 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -1316,19 +1316,6 @@ public class AudioManager {
}
/**
- * @hide
- * Signals whether remote submix audio rerouting is enabled.
- */
- public void setRemoteSubmixOn(boolean on, int address) {
- IAudioService service = getService();
- try {
- service.setRemoteSubmixOn(on, address);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setRemoteSubmixOn", e);
- }
- }
-
- /**
* Sets audio routing to the wired headset on or off.
*
* @param on set <var>true</var> to route audio to/from wired
@@ -2387,6 +2374,35 @@ public class AudioManager {
}
}
+ /**
+ * @hide
+ * Notifies AudioService that it is connected to an A2DP device that supports absolute volume,
+ * so that AudioService can send volume change events to the A2DP device, rather than handling
+ * them.
+ */
+ public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
+ IAudioService service = getService();
+ try {
+ service.avrcpSupportsAbsoluteVolume(address, support);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in avrcpSupportsAbsoluteVolume", e);
+ }
+ }
+
+ /**
+ * @hide
+ * Notifies AudioService of the volume set on the A2DP device as a callback, so AudioService
+ * is able to update the UI.
+ */
+ public void avrcpUpdateVolume(int oldVolume, int volume) {
+ IAudioService service = getService();
+ try {
+ service.avrcpUpdateVolume(oldVolume, volume);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in avrcpUpdateVolume", e);
+ }
+ }
+
/**
* {@hide}
*/
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 290866e..3478007 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -140,24 +140,22 @@ public class AudioService extends IAudioService.Stub {
private static final int MSG_PERSIST_MASTER_VOLUME = 2;
private static final int MSG_PERSIST_RINGER_MODE = 3;
private static final int MSG_MEDIA_SERVER_DIED = 4;
- private static final int MSG_MEDIA_SERVER_STARTED = 5;
- private static final int MSG_PLAY_SOUND_EFFECT = 6;
- private static final int MSG_BTA2DP_DOCK_TIMEOUT = 7;
- private static final int MSG_LOAD_SOUND_EFFECTS = 8;
- private static final int MSG_SET_FORCE_USE = 9;
- private static final int MSG_BT_HEADSET_CNCT_FAILED = 10;
- private static final int MSG_SET_ALL_VOLUMES = 11;
- private static final int MSG_PERSIST_MASTER_VOLUME_MUTE = 12;
- private static final int MSG_REPORT_NEW_ROUTES = 13;
- private static final int MSG_SET_FORCE_BT_A2DP_USE = 14;
- private static final int MSG_SET_RSX_CONNECTION_STATE = 15; // change remote submix connection
- private static final int MSG_CHECK_MUSIC_ACTIVE = 16;
- private static final int MSG_BROADCAST_AUDIO_BECOMING_NOISY = 17;
- private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME = 18;
- private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED = 19;
- private static final int MSG_PERSIST_SAFE_VOLUME_STATE = 20;
- private static final int MSG_BROADCAST_BT_CONNECTION_STATE = 21;
- private static final int MSG_UNLOAD_SOUND_EFFECTS = 22;
+ private static final int MSG_PLAY_SOUND_EFFECT = 5;
+ private static final int MSG_BTA2DP_DOCK_TIMEOUT = 6;
+ private static final int MSG_LOAD_SOUND_EFFECTS = 7;
+ private static final int MSG_SET_FORCE_USE = 8;
+ private static final int MSG_BT_HEADSET_CNCT_FAILED = 9;
+ private static final int MSG_SET_ALL_VOLUMES = 10;
+ private static final int MSG_PERSIST_MASTER_VOLUME_MUTE = 11;
+ private static final int MSG_REPORT_NEW_ROUTES = 12;
+ private static final int MSG_SET_FORCE_BT_A2DP_USE = 13;
+ private static final int MSG_CHECK_MUSIC_ACTIVE = 14;
+ private static final int MSG_BROADCAST_AUDIO_BECOMING_NOISY = 15;
+ private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME = 16;
+ private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED = 17;
+ private static final int MSG_PERSIST_SAFE_VOLUME_STATE = 18;
+ private static final int MSG_BROADCAST_BT_CONNECTION_STATE = 19;
+ private static final int MSG_UNLOAD_SOUND_EFFECTS = 20;
// start of messages handled under wakelock
// these messages can only be queued, i.e. sent with queueMsgUnderWakeLock(),
// and not with sendMsg(..., ..., SENDMSG_QUEUE, ...)
@@ -181,8 +179,6 @@ public class AudioService extends IAudioService.Stub {
// protects mRingerMode
private final Object mSettingsLock = new Object();
- private boolean mMediaServerOk;
-
private SoundPool mSoundPool;
private final Object mSoundEffectsLock = new Object();
private static final int NUM_SOUNDPOOL_CHANNELS = 4;
@@ -287,23 +283,13 @@ public class AudioService extends IAudioService.Stub {
public void onError(int error) {
switch (error) {
case AudioSystem.AUDIO_STATUS_SERVER_DIED:
- if (mMediaServerOk) {
- sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SENDMSG_NOOP, 0, 0,
- null, 1500);
- mMediaServerOk = false;
- }
- break;
- case AudioSystem.AUDIO_STATUS_OK:
- if (!mMediaServerOk) {
- sendMsg(mAudioHandler, MSG_MEDIA_SERVER_STARTED, SENDMSG_NOOP, 0, 0,
- null, 0);
- mMediaServerOk = true;
- }
+ sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED,
+ SENDMSG_NOOP, 0, 0, null, 0);
break;
default:
break;
}
- }
+ }
};
/**
@@ -460,6 +446,12 @@ public class AudioService extends IAudioService.Stub {
private final MediaFocusControl mMediaFocusControl;
+ // Reference to BluetoothA2dp to query for AbsoluteVolume.
+ private BluetoothA2dp mA2dp;
+ private final Object mA2dpAvrcpLock = new Object();
+ // If absolute volume is supported in AVRCP device
+ private boolean mAvrcpAbsVolSupported = false;
+
///////////////////////////////////////////////////////////////////////////
// Construction
///////////////////////////////////////////////////////////////////////////
@@ -494,6 +486,8 @@ public class AudioService extends IAudioService.Stub {
mMediaFocusControl = new MediaFocusControl(mAudioHandler.getLooper(),
mContext, /*VolumeController*/ mVolumePanel, this);
+ AudioSystem.setErrorCallback(mAudioSystemCallback);
+
boolean cameraSoundForced = mContext.getResources().getBoolean(
com.android.internal.R.bool.config_camera_sound_forced);
mCameraSoundForced = new Boolean(cameraSoundForced);
@@ -523,15 +517,12 @@ public class AudioService extends IAudioService.Stub {
createStreamStates();
readAndSetLowRamDevice();
- mMediaServerOk = true;
// Call setRingerModeInt() to apply correct mute
// state on streams affected by ringer mode.
mRingerModeMutedStreams = 0;
setRingerModeInt(getRingerMode(), false);
- AudioSystem.setErrorCallback(mAudioSystemCallback);
-
// Register for device connection intent broadcasts.
IntentFilter intentFilter =
new IntentFilter(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
@@ -901,6 +892,15 @@ public class AudioService extends IAudioService.Stub {
int oldIndex = mStreamStates[streamType].getIndex(device);
if (adjustVolume && (direction != AudioManager.ADJUST_SAME)) {
+ // Check if volume update should be sent to AVRCP
+ synchronized (mA2dpAvrcpLock) {
+ if (mA2dp != null && mAvrcpAbsVolSupported) {
+ mA2dp.adjustAvrcpAbsoluteVolume(direction);
+ return;
+ // No need to send volume update, because we will update the volume with a
+ // callback from Avrcp.
+ }
+ }
if ((direction == AudioManager.ADJUST_RAISE) &&
!checkSafeMediaVolume(streamTypeAlias, aliasIndex + step, device)) {
Log.e(TAG, "adjustStreamVolume() safe volume index = "+oldIndex);
@@ -998,6 +998,15 @@ public class AudioService extends IAudioService.Stub {
index = rescaleIndex(index * 10, streamType, streamTypeAlias);
+ synchronized (mA2dpAvrcpLock) {
+ if (mA2dp != null && mAvrcpAbsVolSupported) {
+ mA2dp.setAvrcpAbsoluteVolume(index);
+ return;
+ // No need to send volume update, because we will update the volume with a
+ // callback from Avrcp.
+ }
+ }
+
flags &= ~AudioManager.FLAG_FIXED_VOLUME;
if ((streamTypeAlias == AudioSystem.STREAM_MUSIC) &&
((device & mFixedVolumeDevices) != 0)) {
@@ -2268,21 +2277,23 @@ public class AudioService extends IAudioService.Stub {
List<BluetoothDevice> deviceList;
switch(profile) {
case BluetoothProfile.A2DP:
- BluetoothA2dp a2dp = (BluetoothA2dp) proxy;
- deviceList = a2dp.getConnectedDevices();
- if (deviceList.size() > 0) {
- btDevice = deviceList.get(0);
- synchronized (mConnectedDevices) {
- int state = a2dp.getConnectionState(btDevice);
- int delay = checkSendBecomingNoisyIntent(
- AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
- (state == BluetoothA2dp.STATE_CONNECTED) ? 1 : 0);
- queueMsgUnderWakeLock(mAudioHandler,
- MSG_SET_A2DP_CONNECTION_STATE,
- state,
- 0,
- btDevice,
- delay);
+ synchronized (mA2dpAvrcpLock) {
+ mA2dp = (BluetoothA2dp) proxy;
+ deviceList = mA2dp.getConnectedDevices();
+ if (deviceList.size() > 0) {
+ btDevice = deviceList.get(0);
+ synchronized (mConnectedDevices) {
+ int state = mA2dp.getConnectionState(btDevice);
+ int delay = checkSendBecomingNoisyIntent(
+ AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
+ (state == BluetoothA2dp.STATE_CONNECTED) ? 1 : 0);
+ queueMsgUnderWakeLock(mAudioHandler,
+ MSG_SET_A2DP_CONNECTION_STATE,
+ state,
+ 0,
+ btDevice,
+ delay);
+ }
}
}
break;
@@ -2344,10 +2355,13 @@ public class AudioService extends IAudioService.Stub {
public void onServiceDisconnected(int profile) {
switch(profile) {
case BluetoothProfile.A2DP:
- synchronized (mConnectedDevices) {
- if (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)) {
- makeA2dpDeviceUnavailableNow(
- mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP));
+ synchronized (mA2dpAvrcpLock) {
+ mA2dp = null;
+ synchronized (mConnectedDevices) {
+ if (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)) {
+ makeA2dpDeviceUnavailableNow(
+ mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP));
+ }
}
}
break;
@@ -2364,26 +2378,6 @@ public class AudioService extends IAudioService.Stub {
}
};
- /** see AudioManager.setRemoteSubmixOn(boolean on) */
- public void setRemoteSubmixOn(boolean on, int address) {
- sendMsg(mAudioHandler, MSG_SET_RSX_CONNECTION_STATE,
- SENDMSG_REPLACE /* replace with QUEUE when multiple addresses are supported */,
- on ? 1 : 0 /*arg1*/,
- address /*arg2*/,
- null/*obj*/, 0/*delay*/);
- }
-
- private void onSetRsxConnectionState(int available, int address) {
- AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_IN_REMOTE_SUBMIX,
- available == 1 ?
- AudioSystem.DEVICE_STATE_AVAILABLE : AudioSystem.DEVICE_STATE_UNAVAILABLE,
- String.valueOf(address) /*device_address*/);
- AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_REMOTE_SUBMIX,
- available == 1 ?
- AudioSystem.DEVICE_STATE_AVAILABLE : AudioSystem.DEVICE_STATE_UNAVAILABLE,
- String.valueOf(address) /*device_address*/);
- }
-
private void onCheckMusicActive() {
synchronized (mSafeMediaVolumeState) {
if (mSafeMediaVolumeState == SAFE_MEDIA_VOLUME_INACTIVE) {
@@ -3424,21 +3418,17 @@ public class AudioService extends IAudioService.Stub {
break;
case MSG_MEDIA_SERVER_DIED:
- if (!mMediaServerOk) {
+ if (AudioSystem.checkAudioFlinger() != AudioSystem.AUDIO_STATUS_OK) {
Log.e(TAG, "Media server died.");
- // Force creation of new IAudioFlinger interface so that we are notified
- // when new media_server process is back to life.
- AudioSystem.setErrorCallback(mAudioSystemCallback);
sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SENDMSG_NOOP, 0, 0,
null, 500);
+ break;
}
- break;
-
- case MSG_MEDIA_SERVER_STARTED:
Log.e(TAG, "Media server started.");
+
// indicate to audio HAL that we start the reconfiguration phase after a media
// server crash
- // Note that MSG_MEDIA_SERVER_STARTED message is only received when the media server
+ // Note that we only execute this when the media server
// process restarts after a crash, not the first time it is started.
AudioSystem.setParameters("restarting=true");
@@ -3571,10 +3561,6 @@ public class AudioService extends IAudioService.Stub {
break;
}
- case MSG_SET_RSX_CONNECTION_STATE:
- onSetRsxConnectionState(msg.arg1/*available*/, msg.arg2/*address*/);
- break;
-
case MSG_CHECK_MUSIC_ACTIVE:
onCheckMusicActive();
break;
@@ -3697,6 +3683,7 @@ public class AudioService extends IAudioService.Stub {
private void onSetA2dpConnectionState(BluetoothDevice btDevice, int state)
{
+ if (DEBUG_VOL) Log.d(TAG, "onSetA2dpConnectionState btDevice="+btDevice+" state="+state);
if (btDevice == null) {
return;
}
@@ -3704,6 +3691,20 @@ public class AudioService extends IAudioService.Stub {
if (!BluetoothAdapter.checkBluetoothAddress(address)) {
address = "";
}
+
+ // Disable absolute volume, if device is disconnected
+ synchronized (mA2dpAvrcpLock) {
+ if (state == BluetoothProfile.STATE_DISCONNECTED && mAvrcpAbsVolSupported) {
+ mAvrcpAbsVolSupported = false;
+ sendMsg(mAudioHandler,
+ MSG_SET_DEVICE_VOLUME,
+ SENDMSG_QUEUE,
+ getDeviceForStream(AudioSystem.STREAM_MUSIC),
+ 0,
+ mStreamStates[AudioSystem.STREAM_MUSIC],
+ 0);
+ }
+ }
synchronized (mConnectedDevices) {
boolean isConnected =
(mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) &&
@@ -3754,6 +3755,31 @@ public class AudioService extends IAudioService.Stub {
}
}
+ public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
+ // address is not used for now, but may be used when multiple a2dp devices are supported
+ synchronized (mA2dpAvrcpLock) {
+ mAvrcpAbsVolSupported = support;
+ if (support) {
+ VolumeStreamState streamState = mStreamStates[AudioSystem.STREAM_MUSIC];
+ int device = getDeviceForStream(AudioSystem.STREAM_MUSIC);
+ streamState.setIndex(streamState.getMaxIndex(), device);
+ sendMsg(mAudioHandler,
+ MSG_SET_DEVICE_VOLUME,
+ SENDMSG_QUEUE,
+ device,
+ 0,
+ streamState,
+ 0);
+ }
+ }
+ }
+
+ public void avrcpUpdateVolume(int oldVolume, int volume) {
+ mStreamStates[AudioSystem.STREAM_MUSIC].
+ setIndex(volume, getDeviceForStream(AudioSystem.STREAM_MUSIC));
+ sendVolumeUpdate(AudioSystem.STREAM_MUSIC, oldVolume, volume, AudioManager.FLAG_SHOW_UI);
+ }
+
private boolean handleDeviceConnection(boolean connected, int device, String params) {
synchronized (mConnectedDevices) {
boolean isConnected = (mConnectedDevices.containsKey(device) &&
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 4805da5..661b0fd 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -177,12 +177,10 @@ public class AudioSystem
{
synchronized (AudioSystem.class) {
mErrorCallback = cb;
+ if (cb != null) {
+ cb.onError(checkAudioFlinger());
+ }
}
- // Calling a method on AudioFlinger here makes sure that we bind to IAudioFlinger
- // binder interface death. Not doing that would result in not being notified of
- // media_server process death if no other method is called on AudioSystem that reaches
- // to AudioFlinger.
- isMicrophoneMuted();
}
private static void errorCallbackFromNative(int error)
@@ -404,5 +402,5 @@ public class AudioSystem
public static native int getOutputLatency(int stream);
public static native int setLowRamDevice(boolean isLowRamDevice);
-
+ public static native int checkAudioFlinger();
}
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index b4c8a04..d8ce36a 100644
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -98,6 +98,10 @@ interface IAudioService {
oneway void reloadAudioSettings();
+ oneway void avrcpSupportsAbsoluteVolume(String address, boolean support);
+
+ oneway void avrcpUpdateVolume(int oldVolume, int volume);
+
void setSpeakerphoneOn(boolean on);
boolean isSpeakerphoneOn();
@@ -110,8 +114,6 @@ interface IAudioService {
boolean isBluetoothA2dpOn();
- oneway void setRemoteSubmixOn(boolean on, int address);
-
int requestAudioFocus(int mainStreamType, int durationHint, IBinder cb,
IAudioFocusDispatcher fd, String clientId, String callingPackageName);
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index 8ddc094..f3356c9 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -20,7 +20,6 @@ import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.os.Handler;
import android.os.Looper;
-import android.os.Message;
import android.view.Surface;
import java.lang.ref.WeakReference;
@@ -130,11 +129,26 @@ public final class ImageReader implements AutoCloseable {
}
/**
- * <p>Get the next Image from the ImageReader's queue. Returns {@code null}
- * if no new image is available.</p>
+ * <p>
+ * Get the next Image from the ImageReader's queue. Returns {@code null} if
+ * no new image is available.
+ * </p>
+ * <p>
+ * This operation will fail by throwing an
+ * {@link Surface.OutOfResourcesException OutOfResourcesException} if too
+ * many images have been acquired with {@link #getNextImage}. In particular
+ * a sequence of {@link #getNextImage} calls greater than {@link #getMaxImages}
+ * without calling {@link Image#close} or {@link #releaseImage} in-between
+ * will exhaust the underlying queue. At such a time,
+ * {@link Surface.OutOfResourcesException OutOfResourcesException} will be
+ * thrown until more images are released with {@link Image#close} or
+ * {@link #releaseImage}.
+ * </p>
*
* @return a new frame of image data, or {@code null} if no image data is
- * available.
+ * available.
+ * @throws Surface.OutOfResourcesException if too many images are currently
+ * acquired
*/
public Image getNextImage() {
SurfaceImage si = new SurfaceImage();
@@ -172,6 +186,8 @@ public final class ImageReader implements AutoCloseable {
* @param listener the listener that will be run
* @param handler The handler on which the listener should be invoked, or null
* if the listener should be invoked on the calling thread's looper.
+ *
+ * @throws IllegalArgumentException if no handler specified and the calling thread has no looper
*/
public void setImageAvailableListener(OnImageAvailableListener listener, Handler handler) {
mImageListener = listener;
@@ -260,8 +276,9 @@ public final class ImageReader implements AutoCloseable {
* Called from Native code when an Event happens.
*/
private static void postEventFromNative(Object selfRef) {
- WeakReference weakSelf = (WeakReference)selfRef;
- final ImageReader ir = (ImageReader)weakSelf.get();
+ @SuppressWarnings("unchecked")
+ WeakReference<ImageReader> weakSelf = (WeakReference<ImageReader>)selfRef;
+ final ImageReader ir = weakSelf.get();
if (ir == null) {
return;
}
diff --git a/media/java/android/media/MediaCodec.java b/media/java/android/media/MediaCodec.java
index 703eb27..f4e867e 100644
--- a/media/java/android/media/MediaCodec.java
+++ b/media/java/android/media/MediaCodec.java
@@ -564,6 +564,52 @@ final public class MediaCodec {
public native final String getName();
/**
+ * Change a video encoder's target bitrate on the fly. The value is an
+ * Integer object containing the new bitrate in bps.
+ */
+ public static final String PARAMETER_KEY_VIDEO_BITRATE = "videoBitrate";
+
+ /**
+ * Temporarily suspend/resume encoding of input data. While suspended
+ * input data is effectively discarded instead of being fed into the
+ * encoder. This parameter really only makes sense to use with an encoder
+ * in "surface-input" mode, as the client code has no control over the
+ * input-side of the encoder in that case.
+ * The value is an Integer object containing the value 1 to suspend
+ * or the value 0 to resume.
+ */
+ public static final String PARAMETER_KEY_SUSPEND = "drop-input-frames";
+
+ /**
+ * Request that the encoder produce a sync frame "soon".
+ * Provide an Integer with the value 0.
+ */
+ public static final String PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+
+ /**
+ * Communicate additional parameter changes to the component instance.
+ */
+ public final void setParameters(Map<String, Object> params) {
+ if (params == null) {
+ return;
+ }
+
+ String[] keys = new String[params.size()];
+ Object[] values = new Object[params.size()];
+
+ int i = 0;
+ for (Map.Entry<String, Object> entry: params.entrySet()) {
+ keys[i] = entry.getKey();
+ values[i] = entry.getValue();
+ ++i;
+ }
+
+ setParameters(keys, values);
+ }
+
+ private native final void setParameters(String[] keys, Object[] values);
+
+ /**
* Get the codec info. If the codec was created by createDecoderByType
* or createEncoderByType, what component is chosen is not known beforehand,
* and thus the caller does not have the MediaCodecInfo.
diff --git a/media/java/android/media/MediaCodecInfo.java b/media/java/android/media/MediaCodecInfo.java
index aeed7d4..90c12c6 100644
--- a/media/java/android/media/MediaCodecInfo.java
+++ b/media/java/android/media/MediaCodecInfo.java
@@ -72,7 +72,8 @@ public final class MediaCodecInfo {
/**
* Encapsulates the capabilities of a given codec component.
* For example, what profile/level combinations it supports and what colorspaces
- * it is capable of providing the decoded data in.
+ * it is capable of providing the decoded data in, as well as some
+ * codec-type specific capability flags.
* <p>You can get an instance for a given {@link MediaCodecInfo} object with
* {@link MediaCodecInfo#getCapabilitiesForType getCapabilitiesForType()}, passing a MIME type.
*/
@@ -139,6 +140,24 @@ public final class MediaCodecInfo {
* OMX_COLOR_FORMATTYPE.
*/
public int[] colorFormats;
+
+ private final static int FLAG_SupportsAdaptivePlayback = (1 << 0);
+ private int flags;
+
+ /**
+ * <b>video decoder only</b>: codec supports seamless resolution changes.
+ */
+ public final static String FEATURE_AdaptivePlayback = "adaptive-playback";
+
+ /**
+ * Query codec feature capabilities.
+ */
+ public final boolean isFeatureSupported(String name) {
+ if (name.equals(FEATURE_AdaptivePlayback)) {
+ return (flags & FLAG_SupportsAdaptivePlayback) != 0;
+ }
+ return false;
+ }
};
/**
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index 3fbaf69..949a42c 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -26,7 +26,7 @@ import java.util.Map;
*
* The format of the media data is specified as string/value pairs.
*
- * Keys common to all formats, <b>all keys not marked optional are mandatory</b>:
+ * Keys common to all audio/video formats, <b>all keys not marked optional are mandatory</b>:
*
* <table>
* <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
@@ -44,6 +44,8 @@ import java.util.Map;
* for encoders, readable in the output format of decoders</b></td></tr>
* <tr><td>{@link #KEY_FRAME_RATE}</td><td>Integer or Float</td><td><b>encoder-only</b></td></tr>
* <tr><td>{@link #KEY_I_FRAME_INTERVAL}</td><td>Integer</td><td><b>encoder-only</b></td></tr>
+ * <tr><td>{@link #KEY_REPEAT_PREVIOUS_FRAME_AFTER}</td><td>Long</td><td><b>video encoder in surface-mode only</b></td></tr>
+ * <tr><td>{@link #KEY_PUSH_BLANK_BUFFERS_ON_STOP}</td><td>Integer(1)</td><td><b>video decoder rendering to a surface only</b></td></tr>
* </table>
*
* Audio formats have the following keys:
@@ -57,6 +59,11 @@ import java.util.Map;
* <tr><td>{@link #KEY_FLAC_COMPRESSION_LEVEL}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is FLAC audio, specifies the desired compression level.</td></tr>
* </table>
*
+ * Subtitle formats have the following keys:
+ * <table>
+ * <tr><td>{@link #KEY_MIME}</td><td>String</td><td>The type of the format.</td></tr>
+ * <tr><td>{@link #KEY_LANGUAGE}</td><td>String</td><td>The language of the content.</td></tr>
+ * </table>
*/
public final class MediaFormat {
private Map<String, Object> mMap;
@@ -68,6 +75,12 @@ public final class MediaFormat {
public static final String KEY_MIME = "mime";
/**
+ * A key describing the language of the content.
+ * The associated value is a string.
+ */
+ public static final String KEY_LANGUAGE = "language";
+
+ /**
* A key describing the sample rate of an audio format.
* The associated value is an integer
*/
@@ -132,6 +145,24 @@ public final class MediaFormat {
public static final String KEY_SLICE_HEIGHT = "slice-height";
/**
+ * Applies only when configuring a video encoder in "surface-input" mode.
+ * The associated value is a long and gives the time in microseconds
+ * after which the frame previously submitted to the encoder will be
+ * repeated (once) if no new frame became available since.
+ */
+ public static final String KEY_REPEAT_PREVIOUS_FRAME_AFTER
+ = "repeat-previous-frame-after";
+
+ /**
+ * If specified when configuring a video decoder rendering to a surface,
+ * causes the decoder to output "blank", i.e. black frames to the surface
+ * when stopped to clear out any previously displayed contents.
+ * The associated value is an integer of value 1.
+ */
+ public static final String KEY_PUSH_BLANK_BUFFERS_ON_STOP
+ = "push-blank-buffers-on-shutdown";
+
+ /**
* A key describing the duration (in microseconds) of the content.
* The associated value is a long.
*/
@@ -277,6 +308,23 @@ public final class MediaFormat {
}
/**
+ * Creates a minimal subtitle format.
+ * @param mime The mime type of the content.
+ * @param language The language of the content. Specify "und" if language
+ * information is only included in the content (similarly, if there
+ * are multiple language tracks in the content.)
+ */
+ public static final MediaFormat createSubtitleFormat(
+ String mime,
+ String language) {
+ MediaFormat format = new MediaFormat();
+ format.setString(KEY_MIME, mime);
+ format.setString(KEY_LANGUAGE, language);
+
+ return format;
+ }
+
+ /**
* Creates a minimal video format.
* @param mime The mime type of the content.
* @param width The width of the content (in pixels)
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index 241c7fa..946dd71 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -38,6 +38,8 @@ import android.view.SurfaceHolder;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.media.AudioManager;
+import android.media.MediaFormat;
+import android.media.SubtitleData;
import java.io.File;
import java.io.FileDescriptor;
@@ -1335,6 +1337,7 @@ public class MediaPlayer
mOnInfoListener = null;
mOnVideoSizeChangedListener = null;
mOnTimedTextListener = null;
+ mOnSubtitleDataListener = null;
_release();
}
@@ -1526,20 +1529,43 @@ public class MediaPlayer
* ISO-639-2 language code, "und", is returned.
*/
public String getLanguage() {
- return mLanguage;
+ String language = mFormat.getString(MediaFormat.KEY_LANGUAGE);
+ return language == null ? "und" : language;
+ }
+
+ /**
+ * Gets the {@link MediaFormat} of the track. If the format is
+ * unknown or could not be determined, null is returned.
+ */
+ public MediaFormat getFormat() {
+ if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ return mFormat;
+ }
+ return null;
}
public static final int MEDIA_TRACK_TYPE_UNKNOWN = 0;
public static final int MEDIA_TRACK_TYPE_VIDEO = 1;
public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
+ /** @hide */
+ public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
final int mTrackType;
- final String mLanguage;
+ final MediaFormat mFormat;
TrackInfo(Parcel in) {
mTrackType = in.readInt();
- mLanguage = in.readString();
+ // TODO: parcel in the full MediaFormat
+ String language = in.readString();
+
+ if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ mFormat = MediaFormat.createSubtitleFormat(
+ MEDIA_MIMETYPE_TEXT_SUBRIP, language);
+ } else {
+ mFormat = new MediaFormat();
+ mFormat.setString(MediaFormat.KEY_LANGUAGE, language);
+ }
}
/**
@@ -1556,7 +1582,7 @@ public class MediaPlayer
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mTrackType);
- dest.writeString(mLanguage);
+ dest.writeString(getLanguage());
}
/**
@@ -1891,6 +1917,7 @@ public class MediaPlayer
private static final int MEDIA_TIMED_TEXT = 99;
private static final int MEDIA_ERROR = 100;
private static final int MEDIA_INFO = 200;
+ private static final int MEDIA_SUBTITLE_DATA = 201;
private class EventHandler extends Handler
{
@@ -1970,6 +1997,18 @@ public class MediaPlayer
}
return;
+ case MEDIA_SUBTITLE_DATA:
+ if (mOnSubtitleDataListener == null) {
+ return;
+ }
+ if (msg.obj instanceof Parcel) {
+ Parcel parcel = (Parcel) msg.obj;
+ SubtitleData data = new SubtitleData(parcel);
+ parcel.recycle();
+ mOnSubtitleDataListener.onSubtitleData(mMediaPlayer, data);
+ }
+ return;
+
case MEDIA_NOP: // interface test message - ignore
break;
@@ -2181,6 +2220,30 @@ public class MediaPlayer
private OnTimedTextListener mOnTimedTextListener;
+ /**
+ * Interface definition of a callback to be invoked when a
+ * track has data available.
+ *
+ * @hide
+ */
+ public interface OnSubtitleDataListener
+ {
+ public void onSubtitleData(MediaPlayer mp, SubtitleData data);
+ }
+
+ /**
+ * Register a callback to be invoked when a track has data available.
+ *
+ * @param listener the callback that will be run
+ *
+ * @hide
+ */
+ public void setOnSubtitleDataListener(OnSubtitleDataListener listener)
+ {
+ mOnSubtitleDataListener = listener;
+ }
+
+ private OnSubtitleDataListener mOnSubtitleDataListener;
/* Do not change these values without updating their counterparts
* in include/media/mediaplayer.h!
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index 3e688db..1d2b889 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -179,6 +179,27 @@ public class MediaRecorder
* is applied.
*/
public static final int VOICE_COMMUNICATION = 7;
+
+ /**
+ * Audio source for a submix of audio streams to be presented remotely.
+ * <p>
+ * An application can use this audio source to capture a mix of audio streams
+ * that should be transmitted to a remote receiver such as a Wifi display.
+ * While recording is active, these audio streams are redirected to the remote
+ * submix instead of being played on the device speaker or headset.
+ * </p><p>
+ * Certain streams are excluded from the remote submix, including
+ * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_ALARM},
+ * and {@link AudioManager#STREAM_NOTIFICATION}. These streams will continue
+ * to be presented locally as usual.
+ * </p><p>
+ * Capturing the remote submix audio requires the
+ * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT} permission.
+ * This permission is reserved for use by system components and is not available to
+ * third-party applications.
+ * </p>
+ */
+ public static final int REMOTE_SUBMIX = 8;
}
/**
@@ -294,7 +315,7 @@ public class MediaRecorder
* @see android.media.MediaRecorder.AudioSource
*/
public static final int getAudioSourceMax() {
- return AudioSource.VOICE_COMMUNICATION;
+ return AudioSource.REMOTE_SUBMIX;
}
/**
diff --git a/media/java/android/media/SubtitleData.java b/media/java/android/media/SubtitleData.java
new file mode 100644
index 0000000..f552e82
--- /dev/null
+++ b/media/java/android/media/SubtitleData.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.util.Log;
+
+/**
+ * @hide
+ *
+ * Class to hold the subtitle track's data, including:
+ * <ul>
+ * <li> Track index</li>
+ * <li> Start time (in microseconds) of the data</li>
+ * <li> Duration (in microseconds) of the data</li>
+ * <li> A byte-array of the data</li>
+ * </ul>
+ *
+ * <p> To receive the subtitle data, applications need to do the following:
+ *
+ * <ul>
+ * <li> Select a track of type MEDIA_TRACK_TYPE_SUBTITLE with {@link MediaPlayer#selectTrack(int)}</li>
+ * <li> Implement the {@link MediaPlayer.OnSubtitleDataListener} interface</li>
+ * <li> Register the {@link MediaPlayer.OnSubtitleDataListener} callback on a MediaPlayer object</li>
+ * </ul>
+ *
+ * @see android.media.MediaPlayer
+ */
+public final class SubtitleData
+{
+ private static final String TAG = "SubtitleData";
+
+ private int mTrackIndex;
+ private long mStartTimeUs;
+ private long mDurationUs;
+ private byte[] mData;
+
+ public SubtitleData(Parcel parcel) {
+ if (!parseParcel(parcel)) {
+ throw new IllegalArgumentException("parseParcel() fails");
+ }
+ }
+
+ public int getTrackIndex() {
+ return mTrackIndex;
+ }
+
+ public long getStartTimeUs() {
+ return mStartTimeUs;
+ }
+
+ public long getDurationUs() {
+ return mDurationUs;
+ }
+
+ public byte[] getData() {
+ return mData;
+ }
+
+ private boolean parseParcel(Parcel parcel) {
+ parcel.setDataPosition(0);
+ if (parcel.dataAvail() == 0) {
+ return false;
+ }
+
+ mTrackIndex = parcel.readInt();
+ mStartTimeUs = parcel.readLong();
+ mDurationUs = parcel.readLong();
+ mData = new byte[parcel.readInt()];
+ parcel.readByteArray(mData);
+
+ return true;
+ }
+}
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index cd589de..7d914d2 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -44,6 +44,9 @@
using namespace android;
+static const char* const OutOfResourcesException =
+ "android/view/Surface$OutOfResourcesException";
+
enum {
IMAGE_READER_MAX_NUM_PLANES = 3,
};
@@ -609,7 +612,8 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
nativeFormat = Image_getPixelFormat(env, format);
sp<BufferQueue> bq = new BufferQueue();
- sp<CpuConsumer> consumer = new CpuConsumer(bq, true, maxImages);
+ sp<CpuConsumer> consumer = new CpuConsumer(bq, maxImages,
+ /*controlledByApp*/true);
// TODO: throw dvm exOutOfMemoryError?
if (consumer == NULL) {
jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
@@ -702,7 +706,17 @@ static jboolean ImageReader_imageSetup(JNIEnv* env, jobject thiz,
status_t res = consumer->lockNextBuffer(buffer);
if (res != NO_ERROR) {
if (res != BAD_VALUE /*no buffers*/) {
- ALOGE("%s Fail to lockNextBuffer with error: %d ", __FUNCTION__, res);
+ if (res == NOT_ENOUGH_DATA) {
+ jniThrowException(env, OutOfResourcesException,
+ "Too many outstanding images, close existing images"
+ " to be able to acquire more.");
+ } else {
+ ALOGE("%s Fail to lockNextBuffer with error: %d ",
+ __FUNCTION__, res);
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Unknown error (%d) when we tried to lock buffer.",
+ res);
+ }
}
return false;
}
@@ -714,6 +728,7 @@ static jboolean ImageReader_imageSetup(JNIEnv* env, jobject thiz,
ALOGE("crop left: %d, top = %d", lt.x, lt.y);
jniThrowException(env, "java/lang/UnsupportedOperationException",
"crop left top corner need to at origin");
+ return false;
}
// Check if the producer buffer configurations match what ImageReader configured.
diff --git a/media/jni/android_media_MediaCodec.cpp b/media/jni/android_media_MediaCodec.cpp
index ae1db87..a859506 100644
--- a/media/jni/android_media_MediaCodec.cpp
+++ b/media/jni/android_media_MediaCodec.cpp
@@ -310,6 +310,10 @@ status_t JMediaCodec::getName(JNIEnv *env, jstring *nameStr) const {
return OK;
}
+status_t JMediaCodec::setParameters(const sp<AMessage> &msg) {
+ return mCodec->setParameters(msg);
+}
+
void JMediaCodec::setVideoScalingMode(int mode) {
if (mSurfaceTextureClient != NULL) {
native_window_set_scaling_mode(mSurfaceTextureClient.get(), mode);
@@ -837,6 +841,27 @@ static jobject android_media_MediaCodec_getName(
return NULL;
}
+static void android_media_MediaCodec_setParameters(
+ JNIEnv *env, jobject thiz, jobjectArray keys, jobjectArray vals) {
+ ALOGV("android_media_MediaCodec_setParameters");
+
+ sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+ if (codec == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ sp<AMessage> params;
+ status_t err = ConvertKeyValueArraysToMessage(env, keys, vals, &params);
+
+ if (err == OK) {
+ err = codec->setParameters(params);
+ }
+
+ throwExceptionAsNecessary(env, err);
+}
+
static void android_media_MediaCodec_setVideoScalingMode(
JNIEnv *env, jobject thiz, jint mode) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
@@ -986,6 +1011,9 @@ static JNINativeMethod gMethods[] = {
{ "getName", "()Ljava/lang/String;",
(void *)android_media_MediaCodec_getName },
+ { "setParameters", "([Ljava/lang/String;[Ljava/lang/Object;)V",
+ (void *)android_media_MediaCodec_setParameters },
+
{ "setVideoScalingMode", "(I)V",
(void *)android_media_MediaCodec_setVideoScalingMode },
diff --git a/media/jni/android_media_MediaCodec.h b/media/jni/android_media_MediaCodec.h
index 282d2c5..2fbbd72 100644
--- a/media/jni/android_media_MediaCodec.h
+++ b/media/jni/android_media_MediaCodec.h
@@ -87,6 +87,8 @@ struct JMediaCodec : public RefBase {
status_t getName(JNIEnv *env, jstring *name) const;
+ status_t setParameters(const sp<AMessage> &params);
+
void setVideoScalingMode(int mode);
protected:
diff --git a/media/jni/android_media_MediaCodecList.cpp b/media/jni/android_media_MediaCodecList.cpp
index 04430ec..caa594e 100644
--- a/media/jni/android_media_MediaCodecList.cpp
+++ b/media/jni/android_media_MediaCodecList.cpp
@@ -110,10 +110,11 @@ static jobject android_media_MediaCodecList_getCodecCapabilities(
Vector<MediaCodecList::ProfileLevel> profileLevels;
Vector<uint32_t> colorFormats;
+ uint32_t flags;
status_t err =
MediaCodecList::getInstance()->getCodecCapabilities(
- index, typeStr, &profileLevels, &colorFormats);
+ index, typeStr, &profileLevels, &colorFormats, &flags);
env->ReleaseStringUTFChars(type, typeStr);
typeStr = NULL;
@@ -127,6 +128,9 @@ static jobject android_media_MediaCodecList_getCodecCapabilities(
env->FindClass("android/media/MediaCodecInfo$CodecCapabilities");
CHECK(capsClazz != NULL);
+ jfieldID flagsField =
+ env->GetFieldID(capsClazz, "flags", "I");
+
jobject caps = env->AllocObject(capsClazz);
jclass profileLevelClazz =
@@ -163,6 +167,8 @@ static jobject android_media_MediaCodecList_getCodecCapabilities(
env->SetObjectField(caps, profileLevelsField, profileLevelArray);
+ env->SetIntField(caps, flagsField, flags);
+
env->DeleteLocalRef(profileLevelArray);
profileLevelArray = NULL;
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index 16a1e48..60142cd 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -753,7 +753,9 @@ static jbyteArray android_media_MediaDrm_provideKeyResponse(
status_t err = drm->provideKeyResponse(sessionId, response, keySetId);
- throwExceptionAsNecessary(env, err, "Failed to handle key response");
+ if (throwExceptionAsNecessary(env, err, "Failed to handle key response")) {
+ return NULL;
+ }
return VectorToJByteArray(env, keySetId);
}
@@ -1104,7 +1106,9 @@ static jbyteArray android_media_MediaDrm_encryptNative(
status_t err = drm->encrypt(sessionId, keyId, input, iv, output);
- throwExceptionAsNecessary(env, err, "Failed to encrypt");
+ if (throwExceptionAsNecessary(env, err, "Failed to encrypt")) {
+ return NULL;
+ }
return VectorToJByteArray(env, output);
}
@@ -1132,7 +1136,9 @@ static jbyteArray android_media_MediaDrm_decryptNative(
Vector<uint8_t> output;
status_t err = drm->decrypt(sessionId, keyId, input, iv, output);
- throwExceptionAsNecessary(env, err, "Failed to decrypt");
+ if (throwExceptionAsNecessary(env, err, "Failed to decrypt")) {
+ return NULL;
+ }
return VectorToJByteArray(env, output);
}
@@ -1160,7 +1166,9 @@ static jbyteArray android_media_MediaDrm_signNative(
status_t err = drm->sign(sessionId, keyId, message, signature);
- throwExceptionAsNecessary(env, err, "Failed to sign");
+ if (throwExceptionAsNecessary(env, err, "Failed to sign")) {
+ return NULL;
+ }
return VectorToJByteArray(env, signature);
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
index 9057f60..624bbaa 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
@@ -25,6 +25,8 @@ import android.hardware.IProCameraUser;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.utils.BinderHolder;
+import android.hardware.camera2.utils.CameraBinderDecorator;
import android.os.Binder;
import android.os.IBinder;
import android.os.RemoteException;
@@ -109,9 +111,11 @@ public class CameraBinderTest extends AndroidTestCase {
String clientPackageName = getContext().getPackageName();
- ICamera cameraUser = mUtils.getCameraService().connect(dummyCallbacks, cameraId,
- clientPackageName,
- CameraBinderTestUtils.USE_CALLING_UID);
+ BinderHolder holder = new BinderHolder();
+ CameraBinderDecorator.newInstance(mUtils.getCameraService())
+ .connect(dummyCallbacks, cameraId, clientPackageName,
+ CameraBinderTestUtils.USE_CALLING_UID, holder);
+ ICamera cameraUser = ICamera.Stub.asInterface(holder.getBinder());
assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
Log.v(TAG, String.format("Camera %s connected", cameraId));
@@ -131,9 +135,11 @@ public class CameraBinderTest extends AndroidTestCase {
String clientPackageName = getContext().getPackageName();
- IProCameraUser cameraUser = mUtils.getCameraService().connectPro(dummyCallbacks,
- cameraId,
- clientPackageName, CameraBinderTestUtils.USE_CALLING_UID);
+ BinderHolder holder = new BinderHolder();
+ CameraBinderDecorator.newInstance(mUtils.getCameraService())
+ .connectPro(dummyCallbacks, cameraId,
+ clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
+ IProCameraUser cameraUser = IProCameraUser.Stub.asInterface(holder.getBinder());
assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
Log.v(TAG, String.format("Camera %s connected", cameraId));
@@ -161,9 +167,11 @@ public class CameraBinderTest extends AndroidTestCase {
String clientPackageName = getContext().getPackageName();
- ICameraDeviceUser cameraUser = mUtils.getCameraService().connectDevice(dummyCallbacks,
- cameraId,
- clientPackageName, CameraBinderTestUtils.USE_CALLING_UID);
+ BinderHolder holder = new BinderHolder();
+ CameraBinderDecorator.newInstance(mUtils.getCameraService())
+ .connectDevice(dummyCallbacks, cameraId,
+ clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
+ ICameraDeviceUser cameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
Log.v(TAG, String.format("Camera %s connected", cameraId));
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
index bdf14ff..722087c 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
@@ -18,10 +18,11 @@ package com.android.mediaframeworktest.integration;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraMetadata;
-import android.hardware.camera2.CameraPropertiesKeys;
+import android.hardware.camera2.CameraProperties;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.utils.BinderHolder;
import android.os.RemoteException;
import android.test.AndroidTestCase;
import android.test.suitebuilder.annotation.SmallTest;
@@ -39,8 +40,8 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
private static String TAG = "CameraDeviceBinderTest";
// Number of streaming callbacks need to check.
private static int NUM_CALLBACKS_CHECKED = 10;
- // Wait for capture result timeout value: 1000ms
- private final static int WAIT_FOR_COMPLETE_TIMEOUT_MS = 1000;
+ // Wait for capture result timeout value: 1500ms
+ private final static int WAIT_FOR_COMPLETE_TIMEOUT_MS = 1500;
private int mCameraId;
private ICameraDeviceUser mCameraUser;
@@ -129,8 +130,10 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
mMockCb = spy(dummyCallbacks);
- mCameraUser = mUtils.getCameraService().connectDevice(mMockCb, mCameraId,
- clientPackageName, CameraBinderTestUtils.USE_CALLING_UID);
+ BinderHolder holder = new BinderHolder();
+ mUtils.getCameraService().connectDevice(mMockCb, mCameraId,
+ clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
+ mCameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
assertNotNull(String.format("Camera %s was null", mCameraId), mCameraUser);
createDefaultSurface();
@@ -268,7 +271,7 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
assertEquals(CameraBinderTestUtils.NO_ERROR, status);
assertFalse(info.isEmpty());
- assertNotNull(info.get(CameraPropertiesKeys.Scaler.AVAILABLE_FORMATS));
+ assertNotNull(info.get(CameraProperties.SCALER_AVAILABLE_FORMATS));
}
@SmallTest
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
index 074bfe4..2d26ac7 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
@@ -86,9 +86,9 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
private Writer mProcMemWriter;
private Writer mMemWriter;
- private CamcorderProfile mCamcorderProfile = CamcorderProfile.get(CAMERA_ID);
- private int mVideoWidth = mCamcorderProfile.videoFrameWidth;
- private int mVideoHeight = mCamcorderProfile.videoFrameHeight;
+ private CamcorderProfile mCamcorderProfile;
+ private int mVideoWidth;
+ private int mVideoHeight;
Camera mCamera;
@@ -99,6 +99,12 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
@Override
protected void setUp() throws Exception {
super.setUp();
+        //Check if the device supports the camcorder
+        mCamcorderProfile = CamcorderProfile.get(CAMERA_ID);
+ if (mCamcorderProfile != null) {
+ mVideoWidth = mCamcorderProfile.videoFrameWidth;
+ mVideoHeight = mCamcorderProfile.videoFrameHeight;
+ }
//Insert a 2 second before launching the test activity. This is
//the workaround for the race condition of requesting the updated surface.
Thread.sleep(2000);
@@ -332,7 +338,7 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
// USER PID PPID VSIZE RSS WCHAN PC NAME
// media 131 1 13676 4796 ffffffff 400b1bd0 S media.log
// media 219 131 37768 6892 ffffffff 400b236c S /system/bin/mediaserver
- String memusage = poList[2].concat("\n");
+ String memusage = poList[poList.length-1].concat("\n");
return memusage;
}
@@ -410,59 +416,65 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase2<Med
// Test case 4: Capture the memory usage after every 20 video only recorded
@LargeTest
public void testH263RecordVideoOnlyMemoryUsage() throws Exception {
- boolean memoryResult = false;
- mStartPid = getMediaserverPid();
- int frameRate = MediaProfileReader.getMaxFrameRateForCodec(MediaRecorder.VideoEncoder.H263);
- assertTrue("H263 video recording frame rate", frameRate != -1);
- for (int i = 0; i < NUM_STRESS_LOOP; i++) {
- assertTrue(stressVideoRecord(frameRate, mVideoWidth, mVideoHeight,
- MediaRecorder.VideoEncoder.H263, MediaRecorder.OutputFormat.MPEG_4,
- MediaNames.RECORDED_VIDEO_3GP, true));
- getMemoryWriteToLog(i);
- writeProcmemInfo();
+ if (mCamcorderProfile != null) {
+ boolean memoryResult = false;
+ mStartPid = getMediaserverPid();
+ int frameRate = MediaProfileReader
+ .getMaxFrameRateForCodec(MediaRecorder.VideoEncoder.H263);
+ assertTrue("H263 video recording frame rate", frameRate != -1);
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ assertTrue(stressVideoRecord(frameRate, mVideoWidth, mVideoHeight,
+ MediaRecorder.VideoEncoder.H263, MediaRecorder.OutputFormat.MPEG_4,
+ MediaNames.RECORDED_VIDEO_3GP, true));
+ getMemoryWriteToLog(i);
+ writeProcmemInfo();
+ }
+ memoryResult = validateMemoryResult(mStartPid, mStartMemory, ENCODER_LIMIT);
+ assertTrue("H263 record only memory test", memoryResult);
}
- memoryResult = validateMemoryResult(mStartPid, mStartMemory, ENCODER_LIMIT);
- assertTrue("H263 record only memory test", memoryResult);
}
// Test case 5: Capture the memory usage after every 20 video only recorded
@LargeTest
public void testMpeg4RecordVideoOnlyMemoryUsage() throws Exception {
- boolean memoryResult = false;
-
- mStartPid = getMediaserverPid();
- int frameRate = MediaProfileReader.getMaxFrameRateForCodec
- (MediaRecorder.VideoEncoder.MPEG_4_SP);
- assertTrue("MPEG4 video recording frame rate", frameRate != -1);
- for (int i = 0; i < NUM_STRESS_LOOP; i++) {
- assertTrue(stressVideoRecord(frameRate, mVideoWidth, mVideoHeight,
- MediaRecorder.VideoEncoder.MPEG_4_SP, MediaRecorder.OutputFormat.MPEG_4,
- MediaNames.RECORDED_VIDEO_3GP, true));
- getMemoryWriteToLog(i);
- writeProcmemInfo();
+ if (mCamcorderProfile != null) {
+ boolean memoryResult = false;
+ mStartPid = getMediaserverPid();
+ int frameRate = MediaProfileReader.getMaxFrameRateForCodec
+ (MediaRecorder.VideoEncoder.MPEG_4_SP);
+ assertTrue("MPEG4 video recording frame rate", frameRate != -1);
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ assertTrue(stressVideoRecord(frameRate, mVideoWidth, mVideoHeight,
+ MediaRecorder.VideoEncoder.MPEG_4_SP, MediaRecorder.OutputFormat.MPEG_4,
+ MediaNames.RECORDED_VIDEO_3GP, true));
+ getMemoryWriteToLog(i);
+ writeProcmemInfo();
+ }
+ memoryResult = validateMemoryResult(mStartPid, mStartMemory, ENCODER_LIMIT);
+ assertTrue("mpeg4 record only memory test", memoryResult);
}
- memoryResult = validateMemoryResult(mStartPid, mStartMemory, ENCODER_LIMIT);
- assertTrue("mpeg4 record only memory test", memoryResult);
}
// Test case 6: Capture the memory usage after every 20 video and audio
// recorded
@LargeTest
public void testRecordVideoAudioMemoryUsage() throws Exception {
- boolean memoryResult = false;
-
- mStartPid = getMediaserverPid();
- int frameRate = MediaProfileReader.getMaxFrameRateForCodec(MediaRecorder.VideoEncoder.H263);
- assertTrue("H263 video recording frame rate", frameRate != -1);
- for (int i = 0; i < NUM_STRESS_LOOP; i++) {
- assertTrue(stressVideoRecord(frameRate, mVideoWidth, mVideoHeight,
- MediaRecorder.VideoEncoder.H263, MediaRecorder.OutputFormat.MPEG_4,
- MediaNames.RECORDED_VIDEO_3GP, false));
- getMemoryWriteToLog(i);
- writeProcmemInfo();
+ if (mCamcorderProfile != null) {
+ boolean memoryResult = false;
+ mStartPid = getMediaserverPid();
+ int frameRate = MediaProfileReader
+ .getMaxFrameRateForCodec(MediaRecorder.VideoEncoder.H263);
+ assertTrue("H263 video recording frame rate", frameRate != -1);
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ assertTrue(stressVideoRecord(frameRate, mVideoWidth, mVideoHeight,
+ MediaRecorder.VideoEncoder.H263, MediaRecorder.OutputFormat.MPEG_4,
+ MediaNames.RECORDED_VIDEO_3GP, false));
+ getMemoryWriteToLog(i);
+ writeProcmemInfo();
+ }
+ memoryResult = validateMemoryResult(mStartPid, mStartMemory, ENCODER_LIMIT);
+ assertTrue("H263 audio video record memory test", memoryResult);
}
- memoryResult = validateMemoryResult(mStartPid, mStartMemory, ENCODER_LIMIT);
- assertTrue("H263 audio video record memory test", memoryResult);
}
// Test case 7: Capture the memory usage after every 20 audio only recorded