Diffstat (limited to 'media')
9 files changed, 246 insertions, 45 deletions
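Most of the insertions below land in the audio framework: the AudioManager.java, AudioService.java and IAudioService.aidl hunks wire AVRCP absolute-volume support so that volume changes are forwarded to the connected A2DP sink instead of being applied to the local music stream. As a hedged illustration only (the new entry points are @hide and reachable from platform code, and the helper class and callback names here are invented for the sketch, not part of this change), this is roughly how the Bluetooth stack is expected to drive them:

// Illustrative sketch only -- not part of this change. Assumes platform code,
// since the @hide AudioManager methods are not visible to regular apps.
import android.content.Context;
import android.media.AudioManager;

class AvrcpVolumeGlue {
    private final AudioManager mAudioManager;

    AvrcpVolumeGlue(Context context) {
        mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    }

    // Hypothetical callback: AVRCP capability discovery finished for the sink.
    void onAbsoluteVolumeSupport(String btAddress, boolean supported) {
        // AudioService will now hand volume changes to BluetoothA2dp
        // (adjustAvrcpAbsoluteVolume/setAvrcpAbsoluteVolume) instead of
        // scaling STREAM_MUSIC locally.
        mAudioManager.avrcpSupportsAbsoluteVolume(btAddress, supported);
    }

    // Hypothetical callback: the sink reported a new volume, already rescaled
    // to a STREAM_MUSIC index by the Bluetooth stack.
    void onRemoteVolumeChanged(int oldIndex, int newIndex) {
        // Lets AudioService refresh the volume UI without re-sending the
        // change to the device.
        mAudioManager.avrcpUpdateVolume(oldIndex, newIndex);
    }
}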
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 14cdbb7..ef02cfd 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -2387,6 +2387,35 @@ public class AudioManager {
         }
     }
 
+    /**
+     * @hide
+     * Notifies AudioService that it is connected to an A2DP device that supports absolute volume,
+     * so that AudioService can send volume change events to the A2DP device, rather than handling
+     * them.
+     */
+    public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
+        IAudioService service = getService();
+        try {
+            service.avrcpSupportsAbsoluteVolume(address, support);
+        } catch (RemoteException e) {
+            Log.e(TAG, "Dead object in avrcpSupportsAbsoluteVolume", e);
+        }
+    }
+
+    /**
+     * @hide
+     * Notifies AudioService of the volume set on the A2DP device as a callback, so AudioService
+     * is able to update the UI.
+     */
+    public void avrcpUpdateVolume(int oldVolume, int volume) {
+        IAudioService service = getService();
+        try {
+            service.avrcpUpdateVolume(oldVolume, volume);
+        } catch (RemoteException e) {
+            Log.e(TAG, "Dead object in avrcpUpdateVolume", e);
+        }
+    }
+
     /**
      * {@hide}
      */
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 290866e..470c571 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -460,6 +460,12 @@ public class AudioService extends IAudioService.Stub {
 
     private final MediaFocusControl mMediaFocusControl;
 
+    // Reference to BluetoothA2dp to query for AbsoluteVolume.
+    private BluetoothA2dp mA2dp;
+    private final Object mA2dpAvrcpLock = new Object();
+    // If absolute volume is supported in AVRCP device
+    private boolean mAvrcpAbsVolSupported = false;
+
     ///////////////////////////////////////////////////////////////////////////
     // Construction
     ///////////////////////////////////////////////////////////////////////////
@@ -901,6 +907,15 @@ public class AudioService extends IAudioService.Stub {
         int oldIndex = mStreamStates[streamType].getIndex(device);
 
         if (adjustVolume && (direction != AudioManager.ADJUST_SAME)) {
+            // Check if volume update should be send to AVRCP
+            synchronized (mA2dpAvrcpLock) {
+                if (mA2dp != null && mAvrcpAbsVolSupported) {
+                    mA2dp.adjustAvrcpAbsoluteVolume(direction);
+                    return;
+                    // No need to send volume update, because we will update the volume with a
+                    // callback from Avrcp.
+                }
+            }
             if ((direction == AudioManager.ADJUST_RAISE) &&
                     !checkSafeMediaVolume(streamTypeAlias, aliasIndex + step, device)) {
                 Log.e(TAG, "adjustStreamVolume() safe volume index = "+oldIndex);
@@ -998,6 +1013,15 @@ public class AudioService extends IAudioService.Stub {
 
         index = rescaleIndex(index * 10, streamType, streamTypeAlias);
 
+        synchronized (mA2dpAvrcpLock) {
+            if (mA2dp != null && mAvrcpAbsVolSupported) {
+                mA2dp.setAvrcpAbsoluteVolume(index);
+                return;
+                // No need to send volume update, because we will update the volume with a
+                // callback from Avrcp.
+            }
+        }
+
         flags &= ~AudioManager.FLAG_FIXED_VOLUME;
         if ((streamTypeAlias == AudioSystem.STREAM_MUSIC) &&
                 ((device & mFixedVolumeDevices) != 0)) {
@@ -2268,21 +2292,23 @@ public class AudioService extends IAudioService.Stub {
             List<BluetoothDevice> deviceList;
             switch(profile) {
             case BluetoothProfile.A2DP:
-                BluetoothA2dp a2dp = (BluetoothA2dp) proxy;
-                deviceList = a2dp.getConnectedDevices();
-                if (deviceList.size() > 0) {
-                    btDevice = deviceList.get(0);
-                    synchronized (mConnectedDevices) {
-                        int state = a2dp.getConnectionState(btDevice);
-                        int delay = checkSendBecomingNoisyIntent(
-                                AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
-                                (state == BluetoothA2dp.STATE_CONNECTED) ? 1 : 0);
-                        queueMsgUnderWakeLock(mAudioHandler,
-                                MSG_SET_A2DP_CONNECTION_STATE,
-                                state,
-                                0,
-                                btDevice,
-                                delay);
+                synchronized (mA2dpAvrcpLock) {
+                    mA2dp = (BluetoothA2dp) proxy;
+                    deviceList = mA2dp.getConnectedDevices();
+                    if (deviceList.size() > 0) {
+                        btDevice = deviceList.get(0);
+                        synchronized (mConnectedDevices) {
+                            int state = mA2dp.getConnectionState(btDevice);
+                            int delay = checkSendBecomingNoisyIntent(
+                                    AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
+                                    (state == BluetoothA2dp.STATE_CONNECTED) ? 1 : 0);
+                            queueMsgUnderWakeLock(mAudioHandler,
+                                    MSG_SET_A2DP_CONNECTION_STATE,
+                                    state,
+                                    0,
+                                    btDevice,
+                                    delay);
+                        }
                     }
                 }
                 break;
@@ -2344,10 +2370,13 @@ public class AudioService extends IAudioService.Stub {
         public void onServiceDisconnected(int profile) {
             switch(profile) {
             case BluetoothProfile.A2DP:
-                synchronized (mConnectedDevices) {
-                    if (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)) {
-                        makeA2dpDeviceUnavailableNow(
-                                mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP));
+                synchronized (mA2dpAvrcpLock) {
+                    mA2dp = null;
+                    synchronized (mConnectedDevices) {
+                        if (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)) {
+                            makeA2dpDeviceUnavailableNow(
+                                    mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP));
+                        }
                     }
                 }
                 break;
@@ -3697,6 +3726,7 @@ public class AudioService extends IAudioService.Stub {
 
     private void onSetA2dpConnectionState(BluetoothDevice btDevice, int state) {
+        if (DEBUG_VOL) Log.d(TAG, "onSetA2dpConnectionState btDevice="+btDevice+" state="+state);
         if (btDevice == null) {
             return;
         }
@@ -3704,6 +3734,20 @@ public class AudioService extends IAudioService.Stub {
         if (!BluetoothAdapter.checkBluetoothAddress(address)) {
             address = "";
         }
+
+        // Disable absolute volume, if device is disconnected
+        synchronized (mA2dpAvrcpLock) {
+            if (state == BluetoothProfile.STATE_DISCONNECTED && mAvrcpAbsVolSupported) {
+                mAvrcpAbsVolSupported = false;
+                sendMsg(mAudioHandler,
+                        MSG_SET_DEVICE_VOLUME,
+                        SENDMSG_QUEUE,
+                        getDeviceForStream(AudioSystem.STREAM_MUSIC),
+                        0,
+                        mStreamStates[AudioSystem.STREAM_MUSIC],
+                        0);
+            }
+        }
         synchronized (mConnectedDevices) {
             boolean isConnected =
                 (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) &&
@@ -3754,6 +3798,31 @@ public class AudioService extends IAudioService.Stub {
         }
     }
 
+    public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
+        // address is not used for now, but may be used when multiple a2dp devices are supported
+        synchronized (mA2dpAvrcpLock) {
+            mAvrcpAbsVolSupported = support;
+            if (support) {
+                VolumeStreamState streamState = mStreamStates[AudioSystem.STREAM_MUSIC];
+                int device = getDeviceForStream(AudioSystem.STREAM_MUSIC);
+                streamState.setIndex(streamState.getMaxIndex(), device);
+                sendMsg(mAudioHandler,
+                        MSG_SET_DEVICE_VOLUME,
+                        SENDMSG_QUEUE,
+                        device,
+                        0,
+                        streamState,
+                        0);
+            }
+        }
+    }
+
+    public void avrcpUpdateVolume(int oldVolume, int volume) {
+        mStreamStates[AudioSystem.STREAM_MUSIC].
+                setIndex(volume, getDeviceForStream(AudioSystem.STREAM_MUSIC));
+        sendVolumeUpdate(AudioSystem.STREAM_MUSIC, oldVolume, volume, AudioManager.FLAG_SHOW_UI);
+    }
+
     private boolean handleDeviceConnection(boolean connected, int device, String params) {
         synchronized (mConnectedDevices) {
             boolean isConnected = (mConnectedDevices.containsKey(device) &&
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index b4c8a04..903927b 100644
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -98,6 +98,10 @@ interface IAudioService {
 
     oneway void reloadAudioSettings();
 
+    oneway void avrcpSupportsAbsoluteVolume(String address, boolean support);
+
+    oneway void avrcpUpdateVolume(int oldVolume, int volume);
+
     void setSpeakerphoneOn(boolean on);
 
     boolean isSpeakerphoneOn();
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index 8ddc094..f3356c9 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -20,7 +20,6 @@ import android.graphics.ImageFormat;
 import android.graphics.PixelFormat;
 import android.os.Handler;
 import android.os.Looper;
-import android.os.Message;
 import android.view.Surface;
 
 import java.lang.ref.WeakReference;
@@ -130,11 +129,26 @@ public final class ImageReader implements AutoCloseable {
     }
 
     /**
-     * <p>Get the next Image from the ImageReader's queue. Returns {@code null}
-     * if no new image is available.</p>
+     * <p>
+     * Get the next Image from the ImageReader's queue. Returns {@code null} if
+     * no new image is available.
+     * </p>
+     * <p>
+     * This operation will fail by throwing an
+     * {@link Surface.OutOfResourcesException OutOfResourcesException} if too
+     * many images have been acquired with {@link #getNextImage}. In particular
+     * a sequence of {@link #getNextImage} calls greater than {@link #getMaxImages}
+     * without calling {@link Image#close} or {@link #releaseImage} in-between
+     * will exhaust the underlying queue. At such a time,
+     * {@link Surface.OutOfResourcesException OutOfResourcesException} will be
+     * thrown until more images are released with {@link Image#close} or
+     * {@link #releaseImage}.
+     * </p>
      *
      * @return a new frame of image data, or {@code null} if no image data is
-     *         available.
+     *         available.
+     * @throws Surface.OutOfResourcesException if too many images are currently
+     *             acquired
      */
     public Image getNextImage() {
         SurfaceImage si = new SurfaceImage();
@@ -172,6 +186,8 @@ public final class ImageReader implements AutoCloseable {
     * @param listener the listener that will be run
     * @param handler The handler on which the listener should be invoked, or null
     *        if the listener should be invoked on the calling thread's looper.
+    *
+    * @throws IllegalArgumentException if no handler specified and the calling thread has no looper
     */
    public void setImageAvailableListener(OnImageAvailableListener listener, Handler handler) {
        mImageListener = listener;
@@ -260,8 +276,9 @@ public final class ImageReader implements AutoCloseable {
      * Called from Native code when an Event happens.
      */
     private static void postEventFromNative(Object selfRef) {
-        WeakReference weakSelf = (WeakReference)selfRef;
-        final ImageReader ir = (ImageReader)weakSelf.get();
+        @SuppressWarnings("unchecked")
+        WeakReference<ImageReader> weakSelf = (WeakReference<ImageReader>)selfRef;
+        final ImageReader ir = weakSelf.get();
         if (ir == null) {
             return;
         }
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index 3fbaf69..949a42c 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -26,7 +26,7 @@ import java.util.Map;
  *
  * The format of the media data is specified as string/value pairs.
  *
- * Keys common to all formats, <b>all keys not marked optional are mandatory</b>:
+ * Keys common to all audio/video formats, <b>all keys not marked optional are mandatory</b>:
  *
  * <table>
  * <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
@@ -44,6 +44,8 @@ import java.util.Map;
  *     for encoders, readable in the output format of decoders</b></td></tr>
  * <tr><td>{@link #KEY_FRAME_RATE}</td><td>Integer or Float</td><td><b>encoder-only</b></td></tr>
  * <tr><td>{@link #KEY_I_FRAME_INTERVAL}</td><td>Integer</td><td><b>encoder-only</b></td></tr>
+ * <tr><td>{@link #KEY_REPEAT_PREVIOUS_FRAME_AFTER}</td><td>Long</td><td><b>video encoder in surface-mode only</b></td></tr>
+ * <tr><td>{@link #KEY_PUSH_BLANK_BUFFERS_ON_STOP}</td><td>Integer(1)</td><td><b>video decoder rendering to a surface only</b></td></tr>
  * </table>
  *
  * Audio formats have the following keys:
@@ -57,6 +59,11 @@ import java.util.Map;
  * <tr><td>{@link #KEY_FLAC_COMPRESSION_LEVEL}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is FLAC audio, specifies the desired compression level.</td></tr>
  * </table>
  *
+ * Subtitle formats have the following keys:
+ * <table>
+ * <tr><td>{@link #KEY_MIME}</td><td>String</td><td>The type of the format.</td></tr>
+ * <tr><td>{@link #KEY_LANGUAGE}</td><td>String</td><td>The language of the content.</td></tr>
+ * </table>
  */
 public final class MediaFormat {
     private Map<String, Object> mMap;
@@ -68,6 +75,12 @@ public final class MediaFormat {
     public static final String KEY_MIME = "mime";
 
     /**
+     * A key describing the language of the content.
+     * The associated value is a string.
+     */
+    public static final String KEY_LANGUAGE = "language";
+
+    /**
      * A key describing the sample rate of an audio format.
     * The associated value is an integer
      */
@@ -132,6 +145,24 @@ public final class MediaFormat {
     public static final String KEY_SLICE_HEIGHT = "slice-height";
 
     /**
+     * Applies only when configuring a video encoder in "surface-input" mode.
+     * The associated value is a long and gives the time in microseconds
+     * after which the frame previously submitted to the encoder will be
+     * repeated (once) if no new frame became available since.
+     */
+    public static final String KEY_REPEAT_PREVIOUS_FRAME_AFTER
+            = "repeat-previous-frame-after";
+
+    /**
+     * If specified when configuring a video decoder rendering to a surface,
+     * causes the decoder to output "blank", i.e. black frames to the surface
+     * when stopped to clear out any previously displayed contents.
+     * The associated value is an integer of value 1.
+     */
+    public static final String KEY_PUSH_BLANK_BUFFERS_ON_STOP
+            = "push-blank-buffers-on-shutdown";
+
+    /**
      * A key describing the duration (in microseconds) of the content.
      * The associated value is a long.
      */
@@ -277,6 +308,23 @@ public final class MediaFormat {
     }
 
     /**
+     * Creates a minimal subtitle format.
+     * @param mime The mime type of the content.
+     * @param language The language of the content. Specify "und" if language
+     *        information is only included in the content (similarly, if there
+     *        are multiple language tracks in the content.)
+     */
+    public static final MediaFormat createSubtitleFormat(
+            String mime,
+            String language) {
+        MediaFormat format = new MediaFormat();
+        format.setString(KEY_MIME, mime);
+        format.setString(KEY_LANGUAGE, language);
+
+        return format;
+    }
+
+    /**
      * Creates a minimal video format.
      * @param mime The mime type of the content.
      * @param width The width of the content (in pixels)
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index cd589de..7d914d2 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -44,6 +44,9 @@ using namespace android;
 
+static const char* const OutOfResourcesException =
+    "android/view/Surface$OutOfResourcesException";
+
 enum {
     IMAGE_READER_MAX_NUM_PLANES = 3,
 };
@@ -609,7 +612,8 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
     nativeFormat = Image_getPixelFormat(env, format);
 
     sp<BufferQueue> bq = new BufferQueue();
-    sp<CpuConsumer> consumer = new CpuConsumer(bq, true, maxImages);
+    sp<CpuConsumer> consumer = new CpuConsumer(bq, maxImages,
+                                               /*controlledByApp*/true);
     // TODO: throw dvm exOutOfMemoryError?
     if (consumer == NULL) {
         jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
@@ -702,7 +706,17 @@ static jboolean ImageReader_imageSetup(JNIEnv* env, jobject thiz,
     status_t res = consumer->lockNextBuffer(buffer);
     if (res != NO_ERROR) {
         if (res != BAD_VALUE /*no buffers*/) {
-            ALOGE("%s Fail to lockNextBuffer with error: %d ", __FUNCTION__, res);
+            if (res == NOT_ENOUGH_DATA) {
+                jniThrowException(env, OutOfResourcesException,
+                        "Too many outstanding images, close existing images"
+                        " to be able to acquire more.");
+            } else {
+                ALOGE("%s Fail to lockNextBuffer with error: %d ",
+                        __FUNCTION__, res);
+                jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+                        "Unknown error (%d) when we tried to lock buffer.",
+                        res);
+            }
         }
         return false;
     }
@@ -714,6 +728,7 @@ static jboolean ImageReader_imageSetup(JNIEnv* env, jobject thiz,
         ALOGE("crop left: %d, top = %d", lt.x, lt.y);
         jniThrowException(env, "java/lang/UnsupportedOperationException",
                 "crop left top corner need to at origin");
+        return false;
     }
 
     // Check if the producer buffer configurations match what ImageReader configured.
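The ImageReader.java and android_media_ImageReader.cpp hunks above formalize the acquire/close contract: holding more than getMaxImages() images without closing them makes the next getNextImage() fail with Surface.OutOfResourcesException (raised from the JNI NOT_ENOUGH_DATA path). Below is a minimal, hedged sketch of a listener that respects that contract; it assumes an ImageReader created elsewhere, uses the pre-rename method names shown in this diff (getNextImage, setImageAvailableListener), and assumes the listener interface exposes onImageAvailable(ImageReader) as in later SDK releases.

// Illustrative sketch only -- not part of this change.
import android.media.Image;
import android.media.ImageReader;

class OneShotImageConsumer implements ImageReader.OnImageAvailableListener {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // getNextImage() returns null when nothing is queued. Holding more than
        // getMaxImages() images without Image.close()/releaseImage() makes later
        // calls throw Surface.OutOfResourcesException, so every acquired image
        // is closed before this callback returns.
        Image image = reader.getNextImage();
        if (image == null) {
            return;
        }
        try {
            consume(image);
        } finally {
            image.close(); // same effect as reader.releaseImage(image)
        }
    }

    private void consume(Image image) {
        // Read image.getPlanes() here.
    }
}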
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index 16a1e48..60142cd 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -753,7 +753,9 @@ static jbyteArray android_media_MediaDrm_provideKeyResponse(
 
     status_t err = drm->provideKeyResponse(sessionId, response, keySetId);
 
-    throwExceptionAsNecessary(env, err, "Failed to handle key response");
+    if (throwExceptionAsNecessary(env, err, "Failed to handle key response")) {
+        return NULL;
+    }
     return VectorToJByteArray(env, keySetId);
 }
 
@@ -1104,7 +1106,9 @@ static jbyteArray android_media_MediaDrm_encryptNative(
 
     status_t err = drm->encrypt(sessionId, keyId, input, iv, output);
 
-    throwExceptionAsNecessary(env, err, "Failed to encrypt");
+    if (throwExceptionAsNecessary(env, err, "Failed to encrypt")) {
+        return NULL;
+    }
     return VectorToJByteArray(env, output);
 }
 
@@ -1132,7 +1136,9 @@ static jbyteArray android_media_MediaDrm_decryptNative(
     Vector<uint8_t> output;
     status_t err = drm->decrypt(sessionId, keyId, input, iv, output);
 
-    throwExceptionAsNecessary(env, err, "Failed to decrypt");
+    if (throwExceptionAsNecessary(env, err, "Failed to decrypt")) {
+        return NULL;
+    }
     return VectorToJByteArray(env, output);
 }
 
@@ -1160,7 +1166,9 @@ static jbyteArray android_media_MediaDrm_signNative(
 
     status_t err = drm->sign(sessionId, keyId, message, signature);
 
-    throwExceptionAsNecessary(env, err, "Failed to sign");
+    if (throwExceptionAsNecessary(env, err, "Failed to sign")) {
+        return NULL;
+    }
     return VectorToJByteArray(env, signature);
 }
 
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
index 9057f60..624bbaa 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraBinderTest.java
@@ -25,6 +25,8 @@ import android.hardware.IProCameraUser;
 import android.hardware.camera2.CameraMetadata;
 import android.hardware.camera2.ICameraDeviceCallbacks;
 import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.utils.BinderHolder;
+import android.hardware.camera2.utils.CameraBinderDecorator;
 import android.os.Binder;
 import android.os.IBinder;
 import android.os.RemoteException;
@@ -109,9 +111,11 @@ public class CameraBinderTest extends AndroidTestCase {
 
             String clientPackageName = getContext().getPackageName();
 
-            ICamera cameraUser = mUtils.getCameraService().connect(dummyCallbacks, cameraId,
-                    clientPackageName,
-                    CameraBinderTestUtils.USE_CALLING_UID);
+            BinderHolder holder = new BinderHolder();
+            CameraBinderDecorator.newInstance(mUtils.getCameraService())
+                    .connect(dummyCallbacks, cameraId, clientPackageName,
+                            CameraBinderTestUtils.USE_CALLING_UID, holder);
+            ICamera cameraUser = ICamera.Stub.asInterface(holder.getBinder());
             assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
 
             Log.v(TAG, String.format("Camera %s connected", cameraId));
@@ -131,9 +135,11 @@ public class CameraBinderTest extends AndroidTestCase {
 
             String clientPackageName = getContext().getPackageName();
 
-            IProCameraUser cameraUser = mUtils.getCameraService().connectPro(dummyCallbacks,
-                    cameraId,
-                    clientPackageName, CameraBinderTestUtils.USE_CALLING_UID);
+            BinderHolder holder = new BinderHolder();
+            CameraBinderDecorator.newInstance(mUtils.getCameraService())
+                    .connectPro(dummyCallbacks, cameraId,
+                            clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
+            IProCameraUser cameraUser = IProCameraUser.Stub.asInterface(holder.getBinder());
             assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
 
             Log.v(TAG, String.format("Camera %s connected", cameraId));
@@ -161,9 +167,11 @@ public class CameraBinderTest extends AndroidTestCase {
 
             String clientPackageName = getContext().getPackageName();
 
-            ICameraDeviceUser cameraUser = mUtils.getCameraService().connectDevice(dummyCallbacks,
-                    cameraId,
-                    clientPackageName, CameraBinderTestUtils.USE_CALLING_UID);
+            BinderHolder holder = new BinderHolder();
+            CameraBinderDecorator.newInstance(mUtils.getCameraService())
+                    .connectDevice(dummyCallbacks, cameraId,
+                            clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
+            ICameraDeviceUser cameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
             assertNotNull(String.format("Camera %s was null", cameraId), cameraUser);
 
             Log.v(TAG, String.format("Camera %s connected", cameraId));
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
index bdf14ff..5225e23 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/integration/CameraDeviceBinderTest.java
@@ -22,6 +22,7 @@ import android.hardware.camera2.CameraPropertiesKeys;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.ICameraDeviceCallbacks;
 import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.utils.BinderHolder;
 import android.os.RemoteException;
 import android.test.AndroidTestCase;
 import android.test.suitebuilder.annotation.SmallTest;
@@ -39,8 +40,8 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
     private static String TAG = "CameraDeviceBinderTest";
     // Number of streaming callbacks need to check.
     private static int NUM_CALLBACKS_CHECKED = 10;
-    // Wait for capture result timeout value: 1000ms
-    private final static int WAIT_FOR_COMPLETE_TIMEOUT_MS = 1000;
+    // Wait for capture result timeout value: 1500ms
+    private final static int WAIT_FOR_COMPLETE_TIMEOUT_MS = 1500;
 
     private int mCameraId;
     private ICameraDeviceUser mCameraUser;
@@ -129,8 +130,10 @@ public class CameraDeviceBinderTest extends AndroidTestCase {
 
         mMockCb = spy(dummyCallbacks);
 
-        mCameraUser = mUtils.getCameraService().connectDevice(mMockCb, mCameraId,
-                clientPackageName, CameraBinderTestUtils.USE_CALLING_UID);
+        BinderHolder holder = new BinderHolder();
+        mUtils.getCameraService().connectDevice(mMockCb, mCameraId,
+                clientPackageName, CameraBinderTestUtils.USE_CALLING_UID, holder);
+        mCameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
         assertNotNull(String.format("Camera %s was null", mCameraId), mCameraUser);
 
         createDefaultSurface();
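For the MediaFormat additions earlier in this diff, a small usage sketch follows. Only the keys and the factory method introduced by the change are taken as given; the subtitle MIME string and the 100 ms value are assumptions chosen for illustration.

// Illustrative sketch of the new MediaFormat keys and factory method.
import android.media.MediaFormat;

class MediaFormatUsage {
    static MediaFormat subtitleTrack() {
        // "und" (undetermined) per the createSubtitleFormat() javadoc when the
        // language only lives inside the content; "text/vtt" is an assumed MIME.
        return MediaFormat.createSubtitleFormat("text/vtt", "und");
    }

    static void tuneSurfaceEncoder(MediaFormat encoderFormat) {
        // Surface-input video encoders only: repeat the previous frame once if
        // no new frame shows up within 100 ms (the value is in microseconds).
        encoderFormat.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 100000L);
    }

    static void tuneSurfaceDecoder(MediaFormat decoderFormat) {
        // Video decoders rendering to a surface only: emit black frames on stop
        // so previously displayed content is cleared; 1 is the only defined value.
        decoderFormat.setInteger(MediaFormat.KEY_PUSH_BLANK_BUFFERS_ON_STOP, 1);
    }
}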