summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
Diffstat (limited to 'media')
-rw-r--r--media/java/android/media/AudioAttributes.java17
-rw-r--r--media/java/android/media/AudioDevicePort.java7
-rw-r--r--media/java/android/media/AudioFocusInfo.aidl18
-rw-r--r--media/java/android/media/AudioFocusInfo.java175
-rw-r--r--media/java/android/media/AudioFormat.java23
-rw-r--r--media/java/android/media/AudioManager.java417
-rw-r--r--media/java/android/media/AudioManagerInternal.java24
-rw-r--r--media/java/android/media/AudioPatch.java21
-rw-r--r--media/java/android/media/AudioPort.java18
-rw-r--r--media/java/android/media/AudioPortEventHandler.java140
-rw-r--r--media/java/android/media/AudioRecord.java18
-rw-r--r--media/java/android/media/AudioService.java898
-rw-r--r--media/java/android/media/AudioSystem.java76
-rw-r--r--media/java/android/media/AudioTrack.java4
-rw-r--r--media/java/android/media/FocusRequester.java108
-rw-r--r--media/java/android/media/IAudioService.aidl26
-rw-r--r--media/java/android/media/ImageReader.java26
-rw-r--r--media/java/android/media/MediaCodecInfo.java23
-rw-r--r--media/java/android/media/MediaDrm.java10
-rw-r--r--media/java/android/media/MediaFocusControl.java207
-rw-r--r--media/java/android/media/MediaFormat.java10
-rw-r--r--media/java/android/media/MediaHTTPConnection.java6
-rw-r--r--media/java/android/media/MediaMetadata.java72
-rw-r--r--media/java/android/media/MediaPlayer.java28
-rw-r--r--media/java/android/media/MediaRecorder.java8
-rw-r--r--media/java/android/media/MediaScannerConnection.java2
-rw-r--r--media/java/android/media/Ringtone.java13
-rw-r--r--media/java/android/media/audiofx/AudioEffect.java4
-rw-r--r--media/java/android/media/audiopolicy/AudioMix.java54
-rw-r--r--media/java/android/media/audiopolicy/AudioMixingRule.java189
-rw-r--r--media/java/android/media/audiopolicy/AudioPolicy.java429
-rw-r--r--media/java/android/media/audiopolicy/AudioPolicyConfig.java74
-rw-r--r--media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl28
-rw-r--r--media/java/android/media/projection/MediaProjection.java15
-rw-r--r--media/java/android/media/session/MediaController.java6
-rw-r--r--media/java/android/media/session/MediaSession.java74
-rw-r--r--media/java/android/media/session/MediaSessionLegacyHelper.java34
-rw-r--r--media/java/android/media/session/MediaSessionManager.java8
-rw-r--r--media/java/android/media/session/PlaybackState.java36
-rw-r--r--media/java/android/media/tv/ITvInputManager.aidl1
-rw-r--r--media/java/android/media/tv/ITvInputSessionWrapper.java9
-rw-r--r--media/java/android/media/tv/TvContract.java30
-rw-r--r--media/java/android/media/tv/TvInputInfo.java3
-rw-r--r--media/java/android/media/tv/TvInputManager.java305
-rw-r--r--media/java/android/media/tv/TvInputService.java232
-rw-r--r--media/java/android/media/tv/TvStreamConfig.java14
-rw-r--r--media/java/android/media/tv/TvView.java188
-rw-r--r--media/java/android/service/media/MediaBrowserService.java35
-rw-r--r--media/jni/android_media_ImageReader.cpp68
-rw-r--r--media/jni/android_media_MediaDrm.cpp42
-rw-r--r--media/jni/android_media_MediaRecorder.cpp3
-rw-r--r--media/jni/android_mtp_MtpDatabase.cpp150
-rw-r--r--media/jni/audioeffect/android_media_AudioEffect.cpp20
-rw-r--r--media/tests/omxjpegdecoder/Android.mk47
-rw-r--r--media/tests/omxjpegdecoder/StreamSource.cpp53
-rw-r--r--media/tests/omxjpegdecoder/StreamSource.h56
-rw-r--r--media/tests/omxjpegdecoder/jpeg_decoder_bench.cpp123
-rw-r--r--media/tests/omxjpegdecoder/omx_jpeg_decoder.cpp160
-rw-r--r--media/tests/omxjpegdecoder/omx_jpeg_decoder.h61
59 files changed, 3509 insertions, 1437 deletions
diff --git a/media/java/android/media/AudioAttributes.java b/media/java/android/media/AudioAttributes.java
index 25dfee6..489f552 100644
--- a/media/java/android/media/AudioAttributes.java
+++ b/media/java/android/media/AudioAttributes.java
@@ -161,6 +161,12 @@ public final class AudioAttributes implements Parcelable {
* Usage value to use when the usage is for game audio.
*/
public final static int USAGE_GAME = 14;
+ /**
+ * @hide
+ * Usage value to use when feeding audio to the platform and replacing "traditional" audio
+ * source, such as audio capture devices.
+ */
+ public final static int USAGE_VIRTUAL_SOURCE = 15;
/**
* Flag defining a behavior where the audibility of the sound will be ensured by the system.
@@ -235,11 +241,11 @@ public final class AudioAttributes implements Parcelable {
/**
* @hide
- * CANDIDATE FOR PUBLIC API
* Return the capture preset.
* @return one of the values that can be set in {@link Builder#setCapturePreset(int)} or a
* negative value if none has been set.
*/
+ @SystemApi
public int getCapturePreset() {
return mSource;
}
@@ -374,6 +380,7 @@ public final class AudioAttributes implements Parcelable {
case USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
case USAGE_ASSISTANCE_SONIFICATION:
case USAGE_GAME:
+ case USAGE_VIRTUAL_SOURCE:
mUsage = usage;
break;
default:
@@ -501,6 +508,7 @@ public final class AudioAttributes implements Parcelable {
* {@link MediaRecorder.AudioSource#VOICE_COMMUNICATION}.
* @return the same Builder instance.
*/
+ @SystemApi
public Builder setCapturePreset(int preset) {
switch (preset) {
case MediaRecorder.AudioSource.DEFAULT:
@@ -518,14 +526,15 @@ public final class AudioAttributes implements Parcelable {
/**
* @hide
- * Same as {@link #setCapturePreset(int)} but authorizes the use of HOTWORD and
- * REMOTE_SUBMIX.
+ * Same as {@link #setCapturePreset(int)} but authorizes the use of HOTWORD,
+ * REMOTE_SUBMIX and FM_TUNER.
* @param preset
* @return the same Builder instance.
*/
public Builder setInternalCapturePreset(int preset) {
if ((preset == MediaRecorder.AudioSource.HOTWORD)
- || (preset == MediaRecorder.AudioSource.REMOTE_SUBMIX)) {
+ || (preset == MediaRecorder.AudioSource.REMOTE_SUBMIX)
+ || (preset == MediaRecorder.AudioSource.FM_TUNER)) {
mSource = preset;
} else {
setCapturePreset(preset);
diff --git a/media/java/android/media/AudioDevicePort.java b/media/java/android/media/AudioDevicePort.java
index 7975e04..b10736b 100644
--- a/media/java/android/media/AudioDevicePort.java
+++ b/media/java/android/media/AudioDevicePort.java
@@ -16,6 +16,8 @@
package android.media;
+import android.media.AudioSystem;
+
/**
* The AudioDevicePort is a specialized type of AudioPort
* describing an input (e.g microphone) or output device (e.g speaker)
@@ -85,8 +87,11 @@ public class AudioDevicePort extends AudioPort {
@Override
public String toString() {
+ String type = (mRole == ROLE_SOURCE ?
+ AudioSystem.getInputDeviceName(mType) :
+ AudioSystem.getOutputDeviceName(mType));
return "{" + super.toString()
- + ", mType:" + mType
+ + ", mType: " + type
+ ", mAddress: " + mAddress
+ "}";
}
diff --git a/media/java/android/media/AudioFocusInfo.aidl b/media/java/android/media/AudioFocusInfo.aidl
new file mode 100644
index 0000000..f925fda
--- /dev/null
+++ b/media/java/android/media/AudioFocusInfo.aidl
@@ -0,0 +1,18 @@
+/* Copyright 2014, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+package android.media;
+
+parcelable AudioFocusInfo;
diff --git a/media/java/android/media/AudioFocusInfo.java b/media/java/android/media/AudioFocusInfo.java
new file mode 100644
index 0000000..fbdda3c
--- /dev/null
+++ b/media/java/android/media/AudioFocusInfo.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.SystemApi;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.Objects;
+
+/**
+ * @hide
+ * A class to encapsulate information about an audio focus owner or request.
+ */
+@SystemApi
+public final class AudioFocusInfo implements Parcelable {
+
+ private AudioAttributes mAttributes;
+ private String mClientId;
+ private String mPackageName;
+ private int mGainRequest;
+ private int mLossReceived;
+ private int mFlags;
+
+
+ /**
+ * Class constructor
+ * @param aa
+ * @param clientId
+ * @param packageName
+ * @param gainRequest
+ * @param lossReceived
+ * @param flags
+ */
+ AudioFocusInfo(AudioAttributes aa, String clientId, String packageName,
+ int gainRequest, int lossReceived, int flags) {
+ mAttributes = aa == null ? new AudioAttributes.Builder().build() : aa;
+ mClientId = clientId == null ? "" : clientId;
+ mPackageName = packageName == null ? "" : packageName;
+ mGainRequest = gainRequest;
+ mLossReceived = lossReceived;
+ mFlags = flags;
+ }
+
+
+ /**
+ * The audio attributes for the audio focus request.
+ * @return non-null {@link AudioAttributes}.
+ */
+ @SystemApi
+ public AudioAttributes getAttributes() { return mAttributes; }
+
+ @SystemApi
+ public String getClientId() { return mClientId; }
+
+ @SystemApi
+ public String getPackageName() { return mPackageName; }
+
+ /**
+ * The type of audio focus gain request.
+ * @return one of {@link AudioManager#AUDIOFOCUS_GAIN},
+ * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT},
+ * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK},
+ * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}.
+ */
+ @SystemApi
+ public int getGainRequest() { return mGainRequest; }
+
+ /**
+ * The type of audio focus loss that was received by the
+ * {@link AudioManager.OnAudioFocusChangeListener} if one was set.
+ * @return 0 if focus wasn't lost, or one of {@link AudioManager#AUDIOFOCUS_LOSS},
+ * {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT} or
+ * {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}.
+ */
+ @SystemApi
+ public int getLossReceived() { return mLossReceived; }
+
+ /** @hide */
+ void clearLossReceived() { mLossReceived = 0; }
+
+ /**
+ * The flags set in the audio focus request.
+ * @return 0 or a combination of {@link AudioManager#AUDIOFOCUS_FLAG_DELAY_OK},
+ * {@link AudioManager#AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS}, and
+ * {@link AudioManager#AUDIOFOCUS_FLAG_LOCK}.
+ */
+ @SystemApi
+ public int getFlags() { return mFlags; }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ mAttributes.writeToParcel(dest, flags);
+ dest.writeString(mClientId);
+ dest.writeString(mPackageName);
+ dest.writeInt(mGainRequest);
+ dest.writeInt(mLossReceived);
+ dest.writeInt(mFlags);
+ }
+
+ @SystemApi
+ @Override
+ public int hashCode() {
+ return Objects.hash(mAttributes, mClientId, mPackageName, mGainRequest, mFlags);
+ }
+
+ @SystemApi
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ AudioFocusInfo other = (AudioFocusInfo) obj;
+ if (!mAttributes.equals(other.mAttributes)) {
+ return false;
+ }
+ if (!mClientId.equals(other.mClientId)) {
+ return false;
+ }
+ if (!mPackageName.equals(other.mPackageName)) {
+ return false;
+ }
+ if (mGainRequest != other.mGainRequest) {
+ return false;
+ }
+ if (mLossReceived != other.mLossReceived) {
+ return false;
+ }
+ if (mFlags != other.mFlags) {
+ return false;
+ }
+ return true;
+ }
+
+ public static final Parcelable.Creator<AudioFocusInfo> CREATOR
+ = new Parcelable.Creator<AudioFocusInfo>() {
+
+ public AudioFocusInfo createFromParcel(Parcel in) {
+ return new AudioFocusInfo(
+ AudioAttributes.CREATOR.createFromParcel(in), //AudioAttributes aa
+ in.readString(), //String clientId
+ in.readString(), //String packageName
+ in.readInt(), //int gainRequest
+ in.readInt(), //int lossReceived
+ in.readInt() //int flags
+ );
+ }
+
+ public AudioFocusInfo[] newArray(int size) {
+ return new AudioFocusInfo[size];
+ }
+ };
+}
diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java
index 58ed4f8..9a0266d 100644
--- a/media/java/android/media/AudioFormat.java
+++ b/media/java/android/media/AudioFormat.java
@@ -128,6 +128,29 @@ public class AudioFormat {
/**
* @hide
+ * Return the input channel mask corresponding to an output channel mask.
+ * This can be used for submix rerouting for the mask of the recorder to map to that of the mix.
+ * @param outMask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT
+ * @return a combination of CHANNEL_IN_* definitions matching an output channel mask
+ * @throws IllegalArgumentException
+ */
+ public static int inChannelMaskFromOutChannelMask(int outMask) throws IllegalArgumentException {
+ if (outMask == CHANNEL_OUT_DEFAULT) {
+ throw new IllegalArgumentException(
+ "Illegal CHANNEL_OUT_DEFAULT channel mask for input.");
+ }
+ switch (channelCountFromOutChannelMask(outMask)) {
+ case 1:
+ return CHANNEL_IN_MONO;
+ case 2:
+ return CHANNEL_IN_STEREO;
+ default:
+ throw new IllegalArgumentException("Unsupported channel configuration for input.");
+ }
+ }
+
+ /**
+ * @hide
* Return the number of channels from an input channel mask
* @param mask a combination of the CHANNEL_IN_* definitions, even CHANNEL_IN_DEFAULT
* @return number of channels for the mask
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 69c1142..67c0552 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -17,6 +17,7 @@
package android.media;
import android.Manifest;
+import android.annotation.NonNull;
import android.annotation.SdkConstant;
import android.annotation.SdkConstant.SdkConstantType;
import android.annotation.SystemApi;
@@ -64,7 +65,7 @@ public class AudioManager {
private final boolean mUseFixedVolume;
private final Binder mToken = new Binder();
private static String TAG = "AudioManager";
- AudioPortEventHandler mAudioPortEventHandler;
+ private static final AudioPortEventHandler sAudioPortEventHandler = new AudioPortEventHandler();
/**
* Broadcast intent, a hint for applications that audio is about to become
@@ -89,6 +90,17 @@ public class AudioManager {
public static final String RINGER_MODE_CHANGED_ACTION = "android.media.RINGER_MODE_CHANGED";
/**
+ * @hide
+ * Sticky broadcast intent action indicating that the internal ringer mode has
+ * changed. Includes the new ringer mode.
+ *
+ * @see #EXTRA_RINGER_MODE
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String INTERNAL_RINGER_MODE_CHANGED_ACTION =
+ "android.media.INTERNAL_RINGER_MODE_CHANGED_ACTION";
+
+ /**
* The new ringer mode.
*
* @see #RINGER_MODE_CHANGED_ACTION
@@ -126,6 +138,17 @@ public class AudioManager {
public static final String VOLUME_CHANGED_ACTION = "android.media.VOLUME_CHANGED_ACTION";
/**
+ * @hide Broadcast intent when a stream mute state changes.
+ * Includes the stream that changed and the new mute state
+ *
+ * @see #EXTRA_VOLUME_STREAM_TYPE
+ * @see #EXTRA_STREAM_VOLUME_MUTED
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String STREAM_MUTE_CHANGED_ACTION =
+ "android.media.STREAM_MUTE_CHANGED_ACTION";
+
+ /**
* @hide Broadcast intent when the master volume changes.
* Includes the new volume
*
@@ -209,6 +232,13 @@ public class AudioManager {
"android.media.EXTRA_MASTER_VOLUME_MUTED";
/**
+ * @hide The new stream volume mute state for the stream mute changed intent.
+ * Value is boolean
+ */
+ public static final String EXTRA_STREAM_VOLUME_MUTED =
+ "android.media.EXTRA_STREAM_VOLUME_MUTED";
+
+ /**
* Broadcast Action: Wired Headset plugged in or unplugged.
*
* You <em>cannot</em> receive this through components declared
@@ -353,21 +383,6 @@ public class AudioManager {
*/
@Deprecated public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS;
-
- /** @hide Default volume index values for audio streams */
- public static final int[] DEFAULT_STREAM_VOLUME = new int[] {
- 4, // STREAM_VOICE_CALL
- 7, // STREAM_SYSTEM
- 5, // STREAM_RING
- 11, // STREAM_MUSIC
- 6, // STREAM_ALARM
- 5, // STREAM_NOTIFICATION
- 7, // STREAM_BLUETOOTH_SCO
- 7, // STREAM_SYSTEM_ENFORCED
- 11, // STREAM_DTMF
- 11 // STREAM_TTS
- };
-
/**
* Increase the ringer volume.
*
@@ -484,6 +499,49 @@ public class AudioManager {
public static final int FLAG_SHOW_UI_WARNINGS = 1 << 10;
/**
+ * Adjusting the volume down from vibrated was prevented, display a hint in the UI.
+ * @hide
+ */
+ public static final int FLAG_SHOW_VIBRATE_HINT = 1 << 11;
+
+ private static final String[] FLAG_NAMES = {
+ "FLAG_SHOW_UI",
+ "FLAG_ALLOW_RINGER_MODES",
+ "FLAG_PLAY_SOUND",
+ "FLAG_REMOVE_SOUND_AND_VIBRATE",
+ "FLAG_VIBRATE",
+ "FLAG_FIXED_VOLUME",
+ "FLAG_BLUETOOTH_ABS_VOLUME",
+ "FLAG_SHOW_SILENT_HINT",
+ "FLAG_HDMI_SYSTEM_AUDIO_VOLUME",
+ "FLAG_ACTIVE_MEDIA_ONLY",
+ "FLAG_SHOW_UI_WARNINGS",
+ "FLAG_SHOW_VIBRATE_HINT",
+ };
+
+ /** @hide */
+ public static String flagsToString(int flags) {
+ final StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < FLAG_NAMES.length; i++) {
+ final int flag = 1 << i;
+ if ((flags & flag) != 0) {
+ if (sb.length() > 0) {
+ sb.append(',');
+ }
+ sb.append(FLAG_NAMES[i]);
+ flags &= ~flag;
+ }
+ }
+ if (flags != 0) {
+ if (sb.length() > 0) {
+ sb.append(',');
+ }
+ sb.append(flags);
+ }
+ return sb.toString();
+ }
+
+ /**
* Ringer mode that will be silent and will not vibrate. (This overrides the
* vibrate setting.)
*
@@ -512,8 +570,11 @@ public class AudioManager {
*/
public static final int RINGER_MODE_NORMAL = 2;
- // maximum valid ringer mode value. Values must start from 0 and be contiguous.
- private static final int RINGER_MODE_MAX = RINGER_MODE_NORMAL;
+ /**
+ * Maximum valid ringer mode value. Values must start from 0 and be contiguous.
+ * @hide
+ */
+ public static final int RINGER_MODE_MAX = RINGER_MODE_NORMAL;
/**
* Vibrate type that corresponds to the ringer.
@@ -585,9 +646,9 @@ public class AudioManager {
com.android.internal.R.bool.config_useMasterVolume);
mUseVolumeKeySounds = mContext.getResources().getBoolean(
com.android.internal.R.bool.config_useVolumeKeySounds);
- mAudioPortEventHandler = new AudioPortEventHandler(this);
mUseFixedVolume = mContext.getResources().getBoolean(
com.android.internal.R.bool.config_useFixedVolume);
+ sAudioPortEventHandler.init();
}
private static IAudioService getService()
@@ -685,11 +746,7 @@ public class AudioManager {
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
if (event.getRepeatCount() == 0) {
- if (mUseMasterVolume) {
- setMasterMute(!isMasterMute());
- } else {
- // TODO: Actually handle MUTE.
- }
+ MediaSessionLegacyHelper.getHelper(mContext).sendVolumeKeyEvent(event, false);
}
break;
}
@@ -720,6 +777,9 @@ public class AudioManager {
}
mVolumeKeyUpTime = SystemClock.uptimeMillis();
break;
+ case KeyEvent.KEYCODE_VOLUME_MUTE:
+ MediaSessionLegacyHelper.getHelper(mContext).sendVolumeKeyEvent(event, false);
+ break;
}
}
@@ -868,7 +928,7 @@ public class AudioManager {
public int getRingerMode() {
IAudioService service = getService();
try {
- return service.getRingerMode();
+ return service.getRingerModeExternal();
} catch (RemoteException e) {
Log.e(TAG, "Dead object in getRingerMode", e);
return RINGER_MODE_NORMAL;
@@ -887,7 +947,13 @@ public class AudioManager {
if (ringerMode < 0 || ringerMode > RINGER_MODE_MAX) {
return false;
}
- return true;
+ IAudioService service = getService();
+ try {
+ return service.isValidRingerMode(ringerMode);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in isValidRingerMode", e);
+ return false;
+ }
}
/**
@@ -982,21 +1048,12 @@ public class AudioManager {
* @see #isVolumeFixed()
*/
public void setRingerMode(int ringerMode) {
- setRingerMode(ringerMode, true /*checkZen*/);
- }
-
- /**
- * @see #setRingerMode(int)
- * @param checkZen Update zen mode if necessary to compensate.
- * @hide
- */
- public void setRingerMode(int ringerMode, boolean checkZen) {
if (!isValidRingerMode(ringerMode)) {
return;
}
IAudioService service = getService();
try {
- service.setRingerMode(ringerMode, checkZen);
+ service.setRingerModeExternal(ringerMode, mContext.getOpPackageName());
} catch (RemoteException e) {
Log.e(TAG, "Dead object in setRingerMode", e);
}
@@ -1216,6 +1273,9 @@ public class AudioManager {
* @hide
*/
public void forceVolumeControlStream(int streamType) {
+ if (mUseMasterVolume) {
+ return;
+ }
IAudioService service = getService();
try {
service.forceVolumeControlStream(streamType, mICallBack);
@@ -2207,6 +2267,8 @@ public class AudioManager {
listener = findFocusListener((String)msg.obj);
}
if (listener != null) {
+ Log.d(TAG, "AudioManager dispatching onAudioFocusChange("
+ + msg.what + ") for " + msg.obj);
listener.onAudioFocusChange(msg.what);
}
}
@@ -2276,6 +2338,14 @@ public class AudioManager {
* A successful focus change request.
*/
public static final int AUDIOFOCUS_REQUEST_GRANTED = 1;
+ /**
+ * @hide
+ * A focus change request whose granting is delayed: the request was successful, but the
+ * requester will only be granted audio focus once the condition that prevented immediate
+ * granting has ended.
+ * See {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ */
+ public static final int AUDIOFOCUS_REQUEST_DELAYED = 2;
/**
@@ -2297,20 +2367,168 @@ public class AudioManager {
*/
public int requestAudioFocus(OnAudioFocusChangeListener l, int streamType, int durationHint) {
int status = AUDIOFOCUS_REQUEST_FAILED;
+
+ try {
+ // status is guaranteed to be either AUDIOFOCUS_REQUEST_FAILED or
+ // AUDIOFOCUS_REQUEST_GRANTED as focus is requested without the
+ // AUDIOFOCUS_FLAG_DELAY_OK flag
+ status = requestAudioFocus(l,
+ new AudioAttributes.Builder()
+ .setInternalLegacyStreamType(streamType).build(),
+ durationHint,
+ 0 /* flags, legacy behavior */);
+ } catch (IllegalArgumentException e) {
+ Log.e(TAG, "Audio focus request denied due to ", e);
+ }
+
+ return status;
+ }
+
+ // when adding new flags, add them to the relevant AUDIOFOCUS_FLAGS_APPS or SYSTEM masks
+ /**
+ * @hide
+ * Use this flag when requesting audio focus to indicate it is ok for the requester to not be
+ * granted audio focus immediately (as indicated by {@link #AUDIOFOCUS_REQUEST_DELAYED}) when
+ * the system is in a state where focus cannot change, but be granted focus later when
+ * this condition ends.
+ */
+ @SystemApi
+ public static final int AUDIOFOCUS_FLAG_DELAY_OK = 0x1 << 0;
+ /**
+ * @hide
+ * Use this flag when requesting audio focus to indicate that the requester
+ * will pause its media playback (if applicable) when losing audio focus with
+ * {@link #AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}, rather than ducking.
+ * <br>On some platforms, the ducking may be handled without the application being aware of it
+ * (i.e. it will not transiently lose focus). For applications that for instance play spoken
+ * content, such as audio book or podcast players, ducking may never be acceptable, and will
+ * thus always pause. This flag enables them to be declared as such whenever they request focus.
+ */
+ @SystemApi
+ public static final int AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS = 0x1 << 1;
+ /**
+ * @hide
+ * Use this flag to lock audio focus so granting is temporarily disabled.
+ * <br>This flag can only be used by owners of a registered
+ * {@link android.media.audiopolicy.AudioPolicy} in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int, AudioPolicy)}
+ */
+ @SystemApi
+ public static final int AUDIOFOCUS_FLAG_LOCK = 0x1 << 2;
+ /** @hide */
+ public static final int AUDIOFOCUS_FLAGS_APPS = AUDIOFOCUS_FLAG_DELAY_OK
+ | AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS;
+ /** @hide */
+ public static final int AUDIOFOCUS_FLAGS_SYSTEM = AUDIOFOCUS_FLAG_DELAY_OK
+ | AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS | AUDIOFOCUS_FLAG_LOCK;
+
+ /**
+ * @hide
+ * Request audio focus.
+ * Send a request to obtain the audio focus. This method differs from
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, int, int)} in that it can express
+ * that the requester accepts delayed grants of audio focus.
+ * @param l the listener to be notified of audio focus changes. It is not allowed to be null
+ * when the request is flagged with {@link #AUDIOFOCUS_FLAG_DELAY_OK}.
+ * @param requestAttributes non null {@link AudioAttributes} describing the main reason for
+ * requesting audio focus.
+ * @param durationHint use {@link #AUDIOFOCUS_GAIN_TRANSIENT} to indicate this focus request
+ * is temporary, and focus will be abandoned shortly. Examples of transient requests are
+ * for the playback of driving directions, or notifications sounds.
+ * Use {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} to indicate also that it's ok for
+ * the previous focus owner to keep playing if it ducks its audio output.
+ * Alternatively use {@link #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE} for a temporary request
+ * that benefits from the system not playing disruptive sounds like notifications, for
+ * usecases such as voice memo recording, or speech recognition.
+ * Use {@link #AUDIOFOCUS_GAIN} for a focus request of unknown duration such
+ * as the playback of a song or a video.
+ * @param flags 0 or a combination of {@link #AUDIOFOCUS_FLAG_DELAY_OK}
+ * and {@link #AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS}.
+ * <br>Use 0 when not using any flags for the request, which behaves like
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, int, int)}, where either audio
+ * focus is granted immediately, or the grant request fails because the system is in a
+ * state where focus cannot change (e.g. a phone call).
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED}, {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * or {@link #AUDIOFOCUS_REQUEST_DELAYED}.
+ * The return value is never {@link #AUDIOFOCUS_REQUEST_DELAYED} when focus is requested
+ * without the {@link #AUDIOFOCUS_FLAG_DELAY_OK} flag.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public int requestAudioFocus(OnAudioFocusChangeListener l,
+ @NonNull AudioAttributes requestAttributes,
+ int durationHint,
+ int flags) throws IllegalArgumentException {
+ if (flags != (flags & AUDIOFOCUS_FLAGS_APPS)) {
+ throw new IllegalArgumentException("Invalid flags 0x"
+ + Integer.toHexString(flags).toUpperCase());
+ }
+ return requestAudioFocus(l, requestAttributes, durationHint,
+ flags & AUDIOFOCUS_FLAGS_APPS,
+ null /* no AudioPolicy*/);
+ }
+
+ /**
+ * @hide
+ * Request or lock audio focus.
+ * This method is to be used by system components that have registered an
+ * {@link android.media.audiopolicy.AudioPolicy} to request audio focus, but also to "lock" it
+ * so focus granting is temporarily disabled.
+ * @param l see the description of the same parameter in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ * @param requestAttributes non null {@link AudioAttributes} describing the main reason for
+ * requesting audio focus.
+ * @param durationHint see the description of the same parameter in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ * @param flags 0 or a combination of {@link #AUDIOFOCUS_FLAG_DELAY_OK},
+ * {@link #AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS}, and {@link #AUDIOFOCUS_FLAG_LOCK}.
+ * <br>Use 0 when not using any flags for the request, which behaves like
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, int, int)}, where either audio
+ * focus is granted immediately, or the grant request fails because the system is in a
+ * state where focus cannot change (e.g. a phone call).
+ * @param ap a registered {@link android.media.audiopolicy.AudioPolicy} instance when locking
+ * focus, or null.
+ * @return see the description of the same return value in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public int requestAudioFocus(OnAudioFocusChangeListener l,
+ @NonNull AudioAttributes requestAttributes,
+ int durationHint,
+ int flags,
+ AudioPolicy ap) throws IllegalArgumentException {
+ // parameter checking
+ if (requestAttributes == null) {
+ throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+ }
if ((durationHint < AUDIOFOCUS_GAIN) ||
(durationHint > AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE)) {
- Log.e(TAG, "Invalid duration hint, audio focus request denied");
- return status;
+ throw new IllegalArgumentException("Invalid duration hint");
+ }
+ if (flags != (flags & AUDIOFOCUS_FLAGS_SYSTEM)) {
+ throw new IllegalArgumentException("Illegal flags 0x"
+ + Integer.toHexString(flags).toUpperCase());
}
+ if (((flags & AUDIOFOCUS_FLAG_DELAY_OK) == AUDIOFOCUS_FLAG_DELAY_OK) && (l == null)) {
+ throw new IllegalArgumentException(
+ "Illegal null focus listener when flagged as accepting delayed focus grant");
+ }
+ if (((flags & AUDIOFOCUS_FLAG_LOCK) == AUDIOFOCUS_FLAG_LOCK) && (ap == null)) {
+ throw new IllegalArgumentException(
+ "Illegal null audio policy when locking audio focus");
+ }
+
+ int status = AUDIOFOCUS_REQUEST_FAILED;
registerAudioFocusListener(l);
- //TODO protect request by permission check?
IAudioService service = getService();
try {
- status = service.requestAudioFocus(streamType, durationHint, mICallBack,
+ status = service.requestAudioFocus(requestAttributes, durationHint, mICallBack,
mAudioFocusDispatcher, getIdForAudioFocusListener(l),
- mContext.getOpPackageName() /* package name */);
+ mContext.getOpPackageName() /* package name */, flags,
+ ap != null ? ap.cb() : null);
} catch (RemoteException e) {
- Log.e(TAG, "Can't call requestAudioFocus() on AudioService due to "+e);
+ Log.e(TAG, "Can't call requestAudioFocus() on AudioService:", e);
}
return status;
}
@@ -2328,11 +2546,15 @@ public class AudioManager {
public void requestAudioFocusForCall(int streamType, int durationHint) {
IAudioService service = getService();
try {
- service.requestAudioFocus(streamType, durationHint, mICallBack, null,
+ service.requestAudioFocus(new AudioAttributes.Builder()
+ .setInternalLegacyStreamType(streamType).build(),
+ durationHint, mICallBack, null,
MediaFocusControl.IN_VOICE_COMM_FOCUS_ID,
- mContext.getOpPackageName());
+ mContext.getOpPackageName(),
+ AUDIOFOCUS_FLAG_LOCK,
+ null /* policy token */);
} catch (RemoteException e) {
- Log.e(TAG, "Can't call requestAudioFocusForCall() on AudioService due to "+e);
+ Log.e(TAG, "Can't call requestAudioFocusForCall() on AudioService:", e);
}
}
@@ -2345,9 +2567,10 @@ public class AudioManager {
public void abandonAudioFocusForCall() {
IAudioService service = getService();
try {
- service.abandonAudioFocus(null, MediaFocusControl.IN_VOICE_COMM_FOCUS_ID);
+ service.abandonAudioFocus(null, MediaFocusControl.IN_VOICE_COMM_FOCUS_ID,
+ null /*AudioAttributes, legacy behavior*/);
} catch (RemoteException e) {
- Log.e(TAG, "Can't call abandonAudioFocusForCall() on AudioService due to "+e);
+ Log.e(TAG, "Can't call abandonAudioFocusForCall() on AudioService:", e);
}
}
@@ -2357,19 +2580,30 @@ public class AudioManager {
* @return {@link #AUDIOFOCUS_REQUEST_FAILED} or {@link #AUDIOFOCUS_REQUEST_GRANTED}
*/
public int abandonAudioFocus(OnAudioFocusChangeListener l) {
+ return abandonAudioFocus(l, null /*AudioAttributes, legacy behavior*/);
+ }
+
+ /**
+ * @hide
+ * Abandon audio focus. Causes the previous focus owner, if any, to receive focus.
+ * @param l the listener with which focus was requested.
+ * @param aa the {@link AudioAttributes} with which audio focus was requested
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED} or {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ */
+ @SystemApi
+ public int abandonAudioFocus(OnAudioFocusChangeListener l, AudioAttributes aa) {
int status = AUDIOFOCUS_REQUEST_FAILED;
unregisterAudioFocusListener(l);
IAudioService service = getService();
try {
status = service.abandonAudioFocus(mAudioFocusDispatcher,
- getIdForAudioFocusListener(l));
+ getIdForAudioFocusListener(l), aa);
} catch (RemoteException e) {
- Log.e(TAG, "Can't call abandonAudioFocus() on AudioService due to "+e);
+ Log.e(TAG, "Can't call abandonAudioFocus() on AudioService:", e);
}
return status;
}
-
//====================================================================
// Remote Control
/**
@@ -2653,25 +2887,30 @@ public class AudioManager {
/**
* @hide
- * CANDIDATE FOR PUBLIC API
* Register the given {@link AudioPolicy}.
* This call is synchronous and blocks until the registration process successfully completed
* or failed to complete.
- * @param policy the {@link AudioPolicy} to register.
+ * @param policy the non-null {@link AudioPolicy} to register.
* @return {@link #ERROR} if there was an error communicating with the registration service
* or if the user doesn't have the required
* {@link android.Manifest.permission#MODIFY_AUDIO_ROUTING} permission,
* {@link #SUCCESS} otherwise.
*/
- public int registerAudioPolicy(AudioPolicy policy) {
+ @SystemApi
+ public int registerAudioPolicy(@NonNull AudioPolicy policy) {
if (policy == null) {
throw new IllegalArgumentException("Illegal null AudioPolicy argument");
}
IAudioService service = getService();
try {
- if (!service.registerAudioPolicy(policy.getConfig(), policy.token())) {
+ String regId = service.registerAudioPolicy(policy.getConfig(), policy.cb(),
+ policy.hasFocusListener());
+ if (regId == null) {
return ERROR;
+ } else {
+ policy.setRegistration(regId);
}
+ // successful registration
} catch (RemoteException e) {
Log.e(TAG, "Dead object in registerAudioPolicyAsync()", e);
return ERROR;
@@ -2681,16 +2920,17 @@ public class AudioManager {
/**
* @hide
- * CANDIDATE FOR PUBLIC API
- * @param policy the {@link AudioPolicy} to unregister.
+ * @param policy the non-null {@link AudioPolicy} to unregister.
*/
- public void unregisterAudioPolicyAsync(AudioPolicy policy) {
+ @SystemApi
+ public void unregisterAudioPolicyAsync(@NonNull AudioPolicy policy) {
if (policy == null) {
throw new IllegalArgumentException("Illegal null AudioPolicy argument");
}
IAudioService service = getService();
try {
- service.unregisterAudioPolicyAsync(policy.token());
+ service.unregisterAudioPolicyAsync(policy.cb());
+ policy.setRegistration(null);
} catch (RemoteException e) {
Log.e(TAG, "Dead object in unregisterAudioPolicyAsync()", e);
}
@@ -3152,6 +3392,31 @@ public class AudioManager {
}
/**
+ * Only useful for volume controllers.
+ * @hide
+ */
+ public void setRingerModeInternal(int ringerMode) {
+ try {
+ getService().setRingerModeInternal(ringerMode, mContext.getOpPackageName());
+ } catch (RemoteException e) {
+ Log.w(TAG, "Error calling setRingerModeInternal", e);
+ }
+ }
+
+ /**
+ * Only useful for volume controllers.
+ * @hide
+ */
+ public int getRingerModeInternal() {
+ try {
+ return getService().getRingerModeInternal();
+ } catch (RemoteException e) {
+ Log.w(TAG, "Error calling getRingerModeInternal", e);
+ return RINGER_MODE_NORMAL;
+ }
+ }
+
+ /**
* Set Hdmi Cec system audio mode.
*
* @param on whether to be on system audio mode
@@ -3342,7 +3607,7 @@ public class AudioManager {
* @hide
*/
public void registerAudioPortUpdateListener(OnAudioPortUpdateListener l) {
- mAudioPortEventHandler.registerListener(l);
+ sAudioPortEventHandler.registerListener(l);
}
/**
@@ -3350,7 +3615,7 @@ public class AudioManager {
* @hide
*/
public void unregisterAudioPortUpdateListener(OnAudioPortUpdateListener l) {
- mAudioPortEventHandler.unregisterListener(l);
+ sAudioPortEventHandler.unregisterListener(l);
}
//
@@ -3358,23 +3623,23 @@ public class AudioManager {
//
static final int AUDIOPORT_GENERATION_INIT = 0;
- Integer mAudioPortGeneration = new Integer(AUDIOPORT_GENERATION_INIT);
- ArrayList<AudioPort> mAudioPortsCached = new ArrayList<AudioPort>();
- ArrayList<AudioPatch> mAudioPatchesCached = new ArrayList<AudioPatch>();
+ static Integer sAudioPortGeneration = new Integer(AUDIOPORT_GENERATION_INIT);
+ static ArrayList<AudioPort> sAudioPortsCached = new ArrayList<AudioPort>();
+ static ArrayList<AudioPatch> sAudioPatchesCached = new ArrayList<AudioPatch>();
- int resetAudioPortGeneration() {
+ static int resetAudioPortGeneration() {
int generation;
- synchronized (mAudioPortGeneration) {
- generation = mAudioPortGeneration;
- mAudioPortGeneration = AUDIOPORT_GENERATION_INIT;
+ synchronized (sAudioPortGeneration) {
+ generation = sAudioPortGeneration;
+ sAudioPortGeneration = AUDIOPORT_GENERATION_INIT;
}
return generation;
}
- int updateAudioPortCache(ArrayList<AudioPort> ports, ArrayList<AudioPatch> patches) {
- synchronized (mAudioPortGeneration) {
+ static int updateAudioPortCache(ArrayList<AudioPort> ports, ArrayList<AudioPatch> patches) {
+ synchronized (sAudioPortGeneration) {
- if (mAudioPortGeneration == AUDIOPORT_GENERATION_INIT) {
+ if (sAudioPortGeneration == AUDIOPORT_GENERATION_INIT) {
int[] patchGeneration = new int[1];
int[] portGeneration = new int[1];
int status;
@@ -3413,23 +3678,23 @@ public class AudioManager {
}
}
- mAudioPortsCached = newPorts;
- mAudioPatchesCached = newPatches;
- mAudioPortGeneration = portGeneration[0];
+ sAudioPortsCached = newPorts;
+ sAudioPatchesCached = newPatches;
+ sAudioPortGeneration = portGeneration[0];
}
if (ports != null) {
ports.clear();
- ports.addAll(mAudioPortsCached);
+ ports.addAll(sAudioPortsCached);
}
if (patches != null) {
patches.clear();
- patches.addAll(mAudioPatchesCached);
+ patches.addAll(sAudioPatchesCached);
}
}
return SUCCESS;
}
- AudioPortConfig updatePortConfig(AudioPortConfig portCfg, ArrayList<AudioPort> ports) {
+ static AudioPortConfig updatePortConfig(AudioPortConfig portCfg, ArrayList<AudioPort> ports) {
AudioPort port = portCfg.port();
int k;
for (k = 0; k < ports.size(); k++) {
diff --git a/media/java/android/media/AudioManagerInternal.java b/media/java/android/media/AudioManagerInternal.java
index 7c0d758..616bdd1 100644
--- a/media/java/android/media/AudioManagerInternal.java
+++ b/media/java/android/media/AudioManagerInternal.java
@@ -15,6 +15,8 @@
*/
package android.media;
+import android.os.IBinder;
+
import com.android.server.LocalServices;
/**
@@ -35,4 +37,26 @@ public abstract class AudioManagerInternal {
public abstract void setStreamVolumeForUid(int streamType, int direction, int flags,
String callingPackage, int uid);
+
+ public abstract void adjustMasterVolumeForUid(int steps, int flags, String callingPackage,
+ int uid);
+
+ public abstract void setMasterMuteForUid(boolean state, int flags, String callingPackage,
+ IBinder cb, int uid);
+
+ public abstract void setRingerModeDelegate(RingerModeDelegate delegate);
+
+ public abstract int getRingerModeInternal();
+
+ public abstract void setRingerModeInternal(int ringerMode, String caller);
+
+ public interface RingerModeDelegate {
+ /** Called when external ringer mode is evaluated, returns the new internal ringer mode */
+ int onSetRingerModeExternal(int ringerModeOld, int ringerModeNew, String caller,
+ int ringerModeInternal);
+
+ /** Called when internal ringer mode is evaluated, returns the new external ringer mode */
+ int onSetRingerModeInternal(int ringerModeOld, int ringerModeNew, String caller,
+ int ringerModeExternal);
+ }
}
diff --git a/media/java/android/media/AudioPatch.java b/media/java/android/media/AudioPatch.java
index 81eceb1..acadb41 100644
--- a/media/java/android/media/AudioPatch.java
+++ b/media/java/android/media/AudioPatch.java
@@ -52,4 +52,25 @@ public class AudioPatch {
public AudioPortConfig[] sinks() {
return mSinks;
}
+
+ @Override
+ public String toString() {
+ StringBuilder s = new StringBuilder();
+ s.append("mHandle: ");
+ s.append(mHandle.toString());
+
+ s.append(" mSources: {");
+ for (AudioPortConfig source : mSources) {
+ s.append(source.toString());
+ s.append(", ");
+ }
+ s.append("} mSinks: {");
+ for (AudioPortConfig sink : mSinks) {
+ s.append(sink.toString());
+ s.append(", ");
+ }
+ s.append("}");
+
+ return s.toString();
+ }
}
diff --git a/media/java/android/media/AudioPort.java b/media/java/android/media/AudioPort.java
index 53212aa..1ab7e89 100644
--- a/media/java/android/media/AudioPort.java
+++ b/media/java/android/media/AudioPort.java
@@ -67,7 +67,7 @@ public class AudioPort {
AudioHandle mHandle;
- private final int mRole;
+ protected final int mRole;
private final int[] mSamplingRates;
private final int[] mChannelMasks;
private final int[] mFormats;
@@ -176,8 +176,20 @@ public class AudioPort {
@Override
public String toString() {
- return "{mHandle:" + mHandle
- + ", mRole:" + mRole
+ String role = Integer.toString(mRole);
+ switch (mRole) {
+ case ROLE_NONE:
+ role = "NONE";
+ break;
+ case ROLE_SOURCE:
+ role = "SOURCE";
+ break;
+ case ROLE_SINK:
+ role = "SINK";
+ break;
+ }
+ return "{mHandle: " + mHandle
+ + ", mRole: " + role
+ "}";
}
}
diff --git a/media/java/android/media/AudioPortEventHandler.java b/media/java/android/media/AudioPortEventHandler.java
index 9db4994..ba2a59d 100644
--- a/media/java/android/media/AudioPortEventHandler.java
+++ b/media/java/android/media/AudioPortEventHandler.java
@@ -31,94 +31,96 @@ import java.lang.ref.WeakReference;
*/
class AudioPortEventHandler {
- private final Handler mHandler;
- private ArrayList<AudioManager.OnAudioPortUpdateListener> mListeners;
- private AudioManager mAudioManager;
+ private Handler mHandler;
+ private final ArrayList<AudioManager.OnAudioPortUpdateListener> mListeners =
+ new ArrayList<AudioManager.OnAudioPortUpdateListener>();
- private static String TAG = "AudioPortEventHandler";
+ private static final String TAG = "AudioPortEventHandler";
private static final int AUDIOPORT_EVENT_PORT_LIST_UPDATED = 1;
private static final int AUDIOPORT_EVENT_PATCH_LIST_UPDATED = 2;
private static final int AUDIOPORT_EVENT_SERVICE_DIED = 3;
private static final int AUDIOPORT_EVENT_NEW_LISTENER = 4;
- AudioPortEventHandler(AudioManager audioManager) {
- mAudioManager = audioManager;
- mListeners = new ArrayList<AudioManager.OnAudioPortUpdateListener>();
-
- // find the looper for our new event handler
- Looper looper = Looper.getMainLooper();
-
- if (looper != null) {
- mHandler = new Handler(looper) {
- @Override
- public void handleMessage(Message msg) {
- ArrayList<AudioManager.OnAudioPortUpdateListener> listeners;
- synchronized (this) {
- if (msg.what == AUDIOPORT_EVENT_NEW_LISTENER) {
- listeners = new ArrayList<AudioManager.OnAudioPortUpdateListener>();
- if (mListeners.contains(msg.obj)) {
- listeners.add((AudioManager.OnAudioPortUpdateListener)msg.obj);
+ void init() {
+ synchronized (this) {
+ if (mHandler != null) {
+ return;
+ }
+ // find the looper for our new event handler
+ Looper looper = Looper.getMainLooper();
+
+ if (looper != null) {
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ ArrayList<AudioManager.OnAudioPortUpdateListener> listeners;
+ synchronized (this) {
+ if (msg.what == AUDIOPORT_EVENT_NEW_LISTENER) {
+ listeners = new ArrayList<AudioManager.OnAudioPortUpdateListener>();
+ if (mListeners.contains(msg.obj)) {
+ listeners.add((AudioManager.OnAudioPortUpdateListener)msg.obj);
+ }
+ } else {
+ listeners = mListeners;
}
- } else {
- listeners = mListeners;
}
- }
- if (listeners.isEmpty()) {
- return;
- }
- // reset audio port cache if the event corresponds to a change coming
- // from audio policy service or if mediaserver process died.
- if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED ||
- msg.what == AUDIOPORT_EVENT_PATCH_LIST_UPDATED ||
- msg.what == AUDIOPORT_EVENT_SERVICE_DIED) {
- mAudioManager.resetAudioPortGeneration();
- }
- ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
- ArrayList<AudioPatch> patches = new ArrayList<AudioPatch>();
- if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
- int status = mAudioManager.updateAudioPortCache(ports, patches);
- if (status != AudioManager.SUCCESS) {
+ if (listeners.isEmpty()) {
return;
}
- }
-
- switch (msg.what) {
- case AUDIOPORT_EVENT_NEW_LISTENER:
- case AUDIOPORT_EVENT_PORT_LIST_UPDATED:
- AudioPort[] portList = ports.toArray(new AudioPort[0]);
- for (int i = 0; i < listeners.size(); i++) {
- listeners.get(i).onAudioPortListUpdate(portList);
+ // reset audio port cache if the event corresponds to a change coming
+ // from audio policy service or if mediaserver process died.
+ if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED ||
+ msg.what == AUDIOPORT_EVENT_PATCH_LIST_UPDATED ||
+ msg.what == AUDIOPORT_EVENT_SERVICE_DIED) {
+ AudioManager.resetAudioPortGeneration();
}
- if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED) {
- break;
+ ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
+ ArrayList<AudioPatch> patches = new ArrayList<AudioPatch>();
+ if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
+ int status = AudioManager.updateAudioPortCache(ports, patches);
+ if (status != AudioManager.SUCCESS) {
+ return;
+ }
}
- // FALL THROUGH
- case AUDIOPORT_EVENT_PATCH_LIST_UPDATED:
- AudioPatch[] patchList = patches.toArray(new AudioPatch[0]);
- for (int i = 0; i < listeners.size(); i++) {
- listeners.get(i).onAudioPatchListUpdate(patchList);
- }
- break;
+ switch (msg.what) {
+ case AUDIOPORT_EVENT_NEW_LISTENER:
+ case AUDIOPORT_EVENT_PORT_LIST_UPDATED:
+ AudioPort[] portList = ports.toArray(new AudioPort[0]);
+ for (int i = 0; i < listeners.size(); i++) {
+ listeners.get(i).onAudioPortListUpdate(portList);
+ }
+ if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED) {
+ break;
+ }
+ // FALL THROUGH
- case AUDIOPORT_EVENT_SERVICE_DIED:
- for (int i = 0; i < listeners.size(); i++) {
- listeners.get(i).onServiceDied();
- }
- break;
+ case AUDIOPORT_EVENT_PATCH_LIST_UPDATED:
+ AudioPatch[] patchList = patches.toArray(new AudioPatch[0]);
+ for (int i = 0; i < listeners.size(); i++) {
+ listeners.get(i).onAudioPatchListUpdate(patchList);
+ }
+ break;
+
+ case AUDIOPORT_EVENT_SERVICE_DIED:
+ for (int i = 0; i < listeners.size(); i++) {
+ listeners.get(i).onServiceDied();
+ }
+ break;
- default:
- break;
+ default:
+ break;
+ }
}
- }
- };
- } else {
- mHandler = null;
+ };
+ native_setup(new WeakReference<AudioPortEventHandler>(this));
+ } else {
+ mHandler = null;
+ }
}
-
- native_setup(new WeakReference<AudioPortEventHandler>(this));
}
+
private native void native_setup(Object module_this);
@Override
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index 52b4649..de10ef9 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -273,18 +273,23 @@ public class AudioRecord
mInitializationLooper = Looper.getMainLooper();
}
- mAudioAttributes = attributes;
-
// is this AudioRecord using REMOTE_SUBMIX at full volume?
- if (mAudioAttributes.getCapturePreset() == MediaRecorder.AudioSource.REMOTE_SUBMIX) {
- final Iterator<String> tagsIter = mAudioAttributes.getTags().iterator();
+ if (attributes.getCapturePreset() == MediaRecorder.AudioSource.REMOTE_SUBMIX) {
+ final AudioAttributes.Builder filteredAttr = new AudioAttributes.Builder();
+ final Iterator<String> tagsIter = attributes.getTags().iterator();
while (tagsIter.hasNext()) {
- if (tagsIter.next().equalsIgnoreCase(SUBMIX_FIXED_VOLUME)) {
+ final String tag = tagsIter.next();
+ if (tag.equalsIgnoreCase(SUBMIX_FIXED_VOLUME)) {
mIsSubmixFullVolume = true;
Log.v(TAG, "Will record from REMOTE_SUBMIX at full fixed volume");
- break;
+ } else { // SUBMIX_FIXED_VOLUME: is not to be propagated to the native layers
+ filteredAttr.addTag(tag);
}
}
+ filteredAttr.setInternalCapturePreset(attributes.getCapturePreset());
+ mAudioAttributes = filteredAttr.build();
+ } else {
+ mAudioAttributes = attributes;
}
int rate = 0;
@@ -371,6 +376,7 @@ public class AudioRecord
// audio source
if ( (audioSource < MediaRecorder.AudioSource.DEFAULT) ||
((audioSource > MediaRecorder.getAudioSourceMax()) &&
+ (audioSource != MediaRecorder.AudioSource.FM_TUNER) &&
(audioSource != MediaRecorder.AudioSource.HOTWORD)) ) {
throw new IllegalArgumentException("Invalid audio source.");
}
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 8a78a8f..2ef245c 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -48,8 +48,10 @@ import android.hardware.hdmi.HdmiTvClient;
import android.hardware.usb.UsbManager;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
+import android.media.audiopolicy.AudioMix;
+import android.media.audiopolicy.AudioPolicy;
import android.media.audiopolicy.AudioPolicyConfig;
-import android.media.session.MediaSessionLegacyHelper;
+import android.media.audiopolicy.IAudioPolicyCallback;
import android.os.Binder;
import android.os.Build;
import android.os.Environment;
@@ -60,13 +62,11 @@ import android.os.Message;
import android.os.PowerManager;
import android.os.RemoteCallbackList;
import android.os.RemoteException;
-import android.os.ServiceManager;
import android.os.SystemClock;
import android.os.SystemProperties;
import android.os.UserHandle;
import android.os.Vibrator;
import android.provider.Settings;
-import android.provider.Settings.Global;
import android.provider.Settings.System;
import android.telecom.TelecomManager;
import android.text.TextUtils;
@@ -74,12 +74,11 @@ import android.util.Log;
import android.util.MathUtils;
import android.util.Slog;
import android.view.KeyEvent;
+import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityManager;
-import android.view.OrientationEventListener;
-import com.android.internal.telephony.ITelephony;
import com.android.internal.util.XmlUtils;
import com.android.server.LocalServices;
@@ -90,8 +89,6 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.util.ArrayList;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -99,6 +96,7 @@ import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
/**
* The implementation of the volume manager service.
@@ -118,6 +116,10 @@ public class AudioService extends IAudioService.Stub {
/** Debug audio mode */
protected static final boolean DEBUG_MODE = Log.isLoggable(TAG + ".MOD", Log.DEBUG);
+
+ /** Debug audio policy feature */
+ protected static final boolean DEBUG_AP = Log.isLoggable(TAG + ".AP", Log.DEBUG);
+
/** Debug volumes */
protected static final boolean DEBUG_VOL = Log.isLoggable(TAG + ".VOL", Log.DEBUG);
@@ -248,7 +250,7 @@ public class AudioService extends IAudioService.Stub {
private final int[][] SOUND_EFFECT_FILES_MAP = new int[AudioManager.NUM_SOUND_EFFECTS][2];
/** @hide Maximum volume index values for audio streams */
- private static final int[] MAX_STREAM_VOLUME = new int[] {
+ private static int[] MAX_STREAM_VOLUME = new int[] {
5, // STREAM_VOICE_CALL
7, // STREAM_SYSTEM
7, // STREAM_RING
@@ -260,6 +262,20 @@ public class AudioService extends IAudioService.Stub {
15, // STREAM_DTMF
15 // STREAM_TTS
};
+
+ private static int[] DEFAULT_STREAM_VOLUME = new int[] {
+ 4, // STREAM_VOICE_CALL
+ 7, // STREAM_SYSTEM
+ 5, // STREAM_RING
+ 11, // STREAM_MUSIC
+ 6, // STREAM_ALARM
+ 5, // STREAM_NOTIFICATION
+ 7, // STREAM_BLUETOOTH_SCO
+ 7, // STREAM_SYSTEM_ENFORCED
+ 11, // STREAM_DTMF
+ 11 // STREAM_TTS
+ };
+
/* mStreamVolumeAlias[] indicates for each stream if it uses the volume settings
* of another stream: This avoids multiplying the volume settings for hidden
* stream types that follow other stream behavior for volume settings
@@ -359,7 +375,8 @@ public class AudioService extends IAudioService.Stub {
* {@link AudioManager#RINGER_MODE_VIBRATE}.
*/
// protected by mSettingsLock
- private int mRingerMode;
+ private int mRingerMode; // internal ringer mode, affects muting of underlying streams
+ private int mRingerModeExternal = -1; // reported ringer mode to outside clients (AudioManager)
/** @see System#MODE_RINGER_STREAMS_AFFECTED */
private int mRingerModeAffectedStreams = 0;
@@ -515,6 +532,10 @@ public class AudioService extends IAudioService.Stub {
private AudioOrientationEventListener mOrientationListener;
+ private static Long mLastDeviceConnectMsgTime = new Long(0);
+
+ private AudioManagerInternal.RingerModeDelegate mRingerModeDelegate;
+
///////////////////////////////////////////////////////////////////////////
// Construction
///////////////////////////////////////////////////////////////////////////
@@ -529,7 +550,7 @@ public class AudioService extends IAudioService.Stub {
com.android.internal.R.bool.config_voice_capable)) {
mPlatformType = PLATFORM_VOICE;
} else if (context.getPackageManager().hasSystemFeature(
- PackageManager.FEATURE_TELEVISION)) {
+ PackageManager.FEATURE_LEANBACK)) {
mPlatformType = PLATFORM_TELEVISION;
} else {
mPlatformType = PLATFORM_DEFAULT;
@@ -542,12 +563,18 @@ public class AudioService extends IAudioService.Stub {
mHasVibrator = vibrator == null ? false : vibrator.hasVibrator();
// Intialized volume
- MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = SystemProperties.getInt(
- "ro.config.vc_call_vol_steps",
- MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]);
- MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC] = SystemProperties.getInt(
- "ro.config.media_vol_steps",
- MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]);
+ int maxVolume = SystemProperties.getInt("ro.config.vc_call_vol_steps",
+ MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]);
+ if (maxVolume != MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]) {
+ MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = maxVolume;
+ DEFAULT_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = (maxVolume * 3) / 4;
+ }
+ maxVolume = SystemProperties.getInt("ro.config.media_vol_steps",
+ MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]);
+ if (maxVolume != MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]) {
+ MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC] = maxVolume;
+ DEFAULT_STREAM_VOLUME[AudioSystem.STREAM_MUSIC] = (maxVolume * 3) / 4;
+ }
sSoundEffectVolumeDb = context.getResources().getInteger(
com.android.internal.R.integer.config_soundEffectVolumeDb);
@@ -583,6 +610,10 @@ public class AudioService extends IAudioService.Stub {
mUseFixedVolume = mContext.getResources().getBoolean(
com.android.internal.R.bool.config_useFixedVolume);
+ mUseMasterVolume = context.getResources().getBoolean(
+ com.android.internal.R.bool.config_useMasterVolume);
+ mMasterVolumeRamp = context.getResources().getIntArray(
+ com.android.internal.R.array.config_masterVolumeRamp);
// must be called before readPersistedSettings() which needs a valid mStreamVolumeAlias[]
// array initialized by updateStreamVolumeAlias()
@@ -596,7 +627,7 @@ public class AudioService extends IAudioService.Stub {
// Call setRingerModeInt() to apply correct mute
// state on streams affected by ringer mode.
mRingerModeMutedStreams = 0;
- setRingerModeInt(getRingerMode(), false);
+ setRingerModeInt(getRingerModeInternal(), false);
// Register for device connection intent broadcasts.
IntentFilter intentFilter =
@@ -633,13 +664,8 @@ public class AudioService extends IAudioService.Stub {
context.registerReceiver(mReceiver, intentFilter);
- mUseMasterVolume = context.getResources().getBoolean(
- com.android.internal.R.bool.config_useMasterVolume);
restoreMasterVolume();
- mMasterVolumeRamp = context.getResources().getIntArray(
- com.android.internal.R.array.config_masterVolumeRamp);
-
LocalServices.addService(AudioManagerInternal.class, new AudioServiceInternal());
}
@@ -716,15 +742,17 @@ public class AudioService extends IAudioService.Stub {
}
private void checkAllAliasStreamVolumes() {
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int streamType = 0; streamType < numStreamTypes; streamType++) {
- if (streamType != mStreamVolumeAlias[streamType]) {
- mStreamStates[streamType].
+ synchronized (VolumeStreamState.class) {
+ int numStreamTypes = AudioSystem.getNumStreamTypes();
+ for (int streamType = 0; streamType < numStreamTypes; streamType++) {
+ if (streamType != mStreamVolumeAlias[streamType]) {
+ mStreamStates[streamType].
setAllIndexes(mStreamStates[mStreamVolumeAlias[streamType]]);
- }
- // apply stream volume
- if (!mStreamStates[streamType].isMuted()) {
- mStreamStates[streamType].applyAllVolumes();
+ }
+ // apply stream volume
+ if (!mStreamStates[streamType].isMuted_syncVSS()) {
+ mStreamStates[streamType].applyAllVolumes();
+ }
}
}
}
@@ -804,7 +832,7 @@ public class AudioService extends IAudioService.Stub {
if (updateVolumes) {
mStreamStates[AudioSystem.STREAM_DTMF].setAllIndexes(mStreamStates[dtmfStreamAlias]);
// apply stream mute states according to new value of mRingerModeAffectedStreams
- setRingerModeInt(getRingerMode(), false);
+ setRingerModeInt(getRingerModeInternal(), false);
sendMsg(mAudioHandler,
MSG_SET_ALL_VOLUMES,
SENDMSG_QUEUE,
@@ -844,7 +872,7 @@ public class AudioService extends IAudioService.Stub {
int ringerMode = ringerModeFromSettings;
// sanity check in case the settings are restored from a device with incompatible
// ringer modes
- if (!AudioManager.isValidRingerMode(ringerMode)) {
+ if (!isValidRingerMode(ringerMode)) {
ringerMode = AudioManager.RINGER_MODE_NORMAL;
}
if ((ringerMode == AudioManager.RINGER_MODE_VIBRATE) && !mHasVibrator) {
@@ -858,6 +886,9 @@ public class AudioService extends IAudioService.Stub {
}
synchronized(mSettingsLock) {
mRingerMode = ringerMode;
+ if (mRingerModeExternal == -1) {
+ mRingerModeExternal = mRingerMode;
+ }
// System.VIBRATE_ON is not used any more but defaults for mVibrateSetting
// are still needed while setVibrateSetting() and getVibrateSetting() are being
@@ -897,8 +928,9 @@ public class AudioService extends IAudioService.Stub {
// Each stream will read its own persisted settings
- // Broadcast the sticky intent
- broadcastRingerMode(ringerMode);
+ // Broadcast the sticky intents
+ broadcastRingerMode(AudioManager.RINGER_MODE_CHANGED_ACTION, mRingerModeExternal);
+ broadcastRingerMode(AudioManager.INTERNAL_RINGER_MODE_CHANGED_ACTION, mRingerMode);
// Broadcast vibrate settings
broadcastVibrateSetting(AudioManager.VIBRATE_TYPE_RINGER);
@@ -1042,7 +1074,7 @@ public class AudioService extends IAudioService.Stub {
// or the stream type is one that is affected by ringer modes
if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
(streamTypeAlias == getMasterStreamType())) {
- int ringerMode = getRingerMode();
+ int ringerMode = getRingerModeInternal();
// do not vibrate if already in vibrate mode
if (ringerMode == AudioManager.RINGER_MODE_VIBRATE) {
flags &= ~AudioManager.FLAG_VIBRATE;
@@ -1055,6 +1087,10 @@ public class AudioService extends IAudioService.Stub {
if ((result & AudioManager.FLAG_SHOW_SILENT_HINT) != 0) {
flags |= AudioManager.FLAG_SHOW_SILENT_HINT;
}
+ // If suppressing a volume down adjustment in vibrate mode, display the UI hint
+ if ((result & AudioManager.FLAG_SHOW_VIBRATE_HINT) != 0) {
+ flags |= AudioManager.FLAG_SHOW_VIBRATE_HINT;
+ }
}
int oldIndex = mStreamStates[streamType].getIndex(device);
@@ -1090,20 +1126,11 @@ public class AudioService extends IAudioService.Stub {
// Check if volume update should be send to Hdmi system audio.
int newIndex = mStreamStates[streamType].getIndex(device);
+ if (streamTypeAlias == AudioSystem.STREAM_MUSIC) {
+ setSystemAudioVolume(oldIndex, newIndex, getStreamMaxVolume(streamType), flags);
+ }
if (mHdmiManager != null) {
synchronized (mHdmiManager) {
- if (mHdmiTvClient != null &&
- streamTypeAlias == AudioSystem.STREAM_MUSIC &&
- (flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) == 0 &&
- oldIndex != newIndex) {
- int maxIndex = getStreamMaxVolume(streamType);
- synchronized (mHdmiTvClient) {
- if (mHdmiSystemAudioSupported) {
- mHdmiTvClient.setSystemAudioVolume(
- (oldIndex + 5) / 10, (newIndex + 5) / 10, maxIndex);
- }
- }
- }
// mHdmiCecSink true => mHdmiPlaybackClient != null
if (mHdmiCecSink &&
streamTypeAlias == AudioSystem.STREAM_MUSIC &&
@@ -1122,8 +1149,34 @@ public class AudioService extends IAudioService.Stub {
sendVolumeUpdate(streamType, oldIndex, index, flags);
}
+ private void setSystemAudioVolume(int oldVolume, int newVolume, int maxVolume, int flags) {
+ if (mHdmiManager == null
+ || mHdmiTvClient == null
+ || oldVolume == newVolume
+ || (flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) != 0) return;
+
+ // Sets the audio volume of AVR when we are in system audio mode. The new volume info
+ // is tranformed to HDMI-CEC commands and passed through CEC bus.
+ synchronized (mHdmiManager) {
+ if (!mHdmiSystemAudioSupported) return;
+ synchronized (mHdmiTvClient) {
+ final long token = Binder.clearCallingIdentity();
+ try {
+ mHdmiTvClient.setSystemAudioVolume(
+ (oldVolume + 5) / 10, (newVolume + 5) / 10, maxVolume);
+ } finally {
+ Binder.restoreCallingIdentity(token);
+ }
+ }
+ }
+ }
+
/** @see AudioManager#adjustMasterVolume(int, int) */
public void adjustMasterVolume(int steps, int flags, String callingPackage) {
+ adjustMasterVolume(steps, flags, callingPackage, Binder.getCallingUid());
+ }
+
+ public void adjustMasterVolume(int steps, int flags, String callingPackage, int uid) {
if (mUseFixedVolume) {
return;
}
@@ -1138,7 +1191,7 @@ public class AudioService extends IAudioService.Stub {
}
//Log.d(TAG, "adjustMasterVolume volume: " + volume + " steps: " + steps);
- setMasterVolume(volume, flags, callingPackage);
+ setMasterVolume(volume, flags, callingPackage, uid);
}
// StreamVolumeCommand contains the information needed to defer the process of
@@ -1177,7 +1230,7 @@ public class AudioService extends IAudioService.Stub {
} else {
newRingerMode = AudioManager.RINGER_MODE_NORMAL;
}
- setRingerMode(newRingerMode, false /*checkZen*/);
+ setRingerMode(newRingerMode, TAG + ".onSetStreamVolume", false /*external*/);
}
}
@@ -1229,21 +1282,8 @@ public class AudioService extends IAudioService.Stub {
}
}
- if (mHdmiManager != null) {
- synchronized (mHdmiManager) {
- if (mHdmiTvClient != null &&
- streamTypeAlias == AudioSystem.STREAM_MUSIC &&
- (flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) == 0 &&
- oldIndex != index) {
- int maxIndex = getStreamMaxVolume(streamType);
- synchronized (mHdmiTvClient) {
- if (mHdmiSystemAudioSupported) {
- mHdmiTvClient.setSystemAudioVolume(
- (oldIndex + 5) / 10, (index + 5) / 10, maxIndex);
- }
- }
- }
- }
+ if (streamTypeAlias == AudioSystem.STREAM_MUSIC) {
+ setSystemAudioVolume(oldIndex, index, getStreamMaxVolume(streamType), flags);
}
flags &= ~AudioManager.FLAG_FIXED_VOLUME;
@@ -1361,6 +1401,7 @@ public class AudioService extends IAudioService.Stub {
}
private void sendBroadcastToAll(Intent intent) {
+ intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT);
final long ident = Binder.clearCallingIdentity();
try {
mContext.sendBroadcastAsUser(intent, UserHandle.ALL);
@@ -1384,15 +1425,8 @@ public class AudioService extends IAudioService.Stub {
streamType = AudioSystem.STREAM_NOTIFICATION;
}
- // If Hdmi-CEC system audio mode is on, show volume bar
- // only when TV receives volume notification from Audio Receiver.
- if (mHdmiTvClient != null && streamType == AudioSystem.STREAM_MUSIC) {
- synchronized (mHdmiTvClient) {
- if (mHdmiSystemAudioSupported &&
- ((flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) == 0)) {
- flags &= ~AudioManager.FLAG_SHOW_UI;
- }
- }
+ if (streamType == AudioSystem.STREAM_MUSIC) {
+ flags = updateFlagsForSystemAudio(flags);
}
mVolumeController.postVolumeChanged(streamType, flags);
@@ -1407,9 +1441,23 @@ public class AudioService extends IAudioService.Stub {
}
}
+ // If Hdmi-CEC system audio mode is on, we show volume bar only when TV
+ // receives volume notification from Audio Receiver.
+ private int updateFlagsForSystemAudio(int flags) {
+ if (mHdmiTvClient != null) {
+ synchronized (mHdmiTvClient) {
+ if (mHdmiSystemAudioSupported &&
+ ((flags & AudioManager.FLAG_HDMI_SYSTEM_AUDIO_VOLUME) == 0)) {
+ flags &= ~AudioManager.FLAG_SHOW_UI;
+ }
+ }
+ }
+ return flags;
+ }
+
// UI update and Broadcast Intent
private void sendMasterVolumeUpdate(int flags, int oldVolume, int newVolume) {
- mVolumeController.postMasterVolumeChanged(flags);
+ mVolumeController.postMasterVolumeChanged(updateFlagsForSystemAudio(flags));
Intent intent = new Intent(AudioManager.MASTER_VOLUME_CHANGED_ACTION);
intent.putExtra(AudioManager.EXTRA_PREV_MASTER_VOLUME_VALUE, oldVolume);
@@ -1419,7 +1467,7 @@ public class AudioService extends IAudioService.Stub {
// UI update and Broadcast Intent
private void sendMasterMuteUpdate(boolean muted, int flags) {
- mVolumeController.postMasterMuteChanged(flags);
+ mVolumeController.postMasterMuteChanged(updateFlagsForSystemAudio(flags));
broadcastMasterMuteStatus(muted);
}
@@ -1465,9 +1513,11 @@ public class AudioService extends IAudioService.Stub {
if (mUseFixedVolume) {
return;
}
-
+ int streamAlias = mStreamVolumeAlias[streamType];
for (int stream = 0; stream < mStreamStates.length; stream++) {
- if (!isStreamAffectedByMute(stream) || stream == streamType) continue;
+ if (!isStreamAffectedByMute(streamAlias) || streamAlias == mStreamVolumeAlias[stream]) {
+ continue;
+ }
mStreamStates[stream].mute(cb, state);
}
}
@@ -1477,26 +1527,50 @@ public class AudioService extends IAudioService.Stub {
if (mUseFixedVolume) {
return;
}
+ if (streamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
+ streamType = getActiveStreamType(streamType);
+ }
+ int streamAlias = mStreamVolumeAlias[streamType];
+ if (isStreamAffectedByMute(streamAlias)) {
+ if (streamAlias == AudioSystem.STREAM_MUSIC) {
+ setSystemAudioMute(state);
+ }
+ for (int stream = 0; stream < mStreamStates.length; stream++) {
+ if (streamAlias == mStreamVolumeAlias[stream]) {
+ mStreamStates[stream].mute(cb, state);
- if (isStreamAffectedByMute(streamType)) {
- if (mHdmiManager != null) {
- synchronized (mHdmiManager) {
- if (streamType == AudioSystem.STREAM_MUSIC && mHdmiTvClient != null) {
- synchronized (mHdmiTvClient) {
- if (mHdmiSystemAudioSupported) {
- mHdmiTvClient.setSystemAudioMute(state);
- }
- }
- }
+ Intent intent = new Intent(AudioManager.STREAM_MUTE_CHANGED_ACTION);
+ intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_TYPE, stream);
+ intent.putExtra(AudioManager.EXTRA_STREAM_VOLUME_MUTED, state);
+ sendBroadcastToAll(intent);
+ }
+ }
+ }
+ }
+
+ private void setSystemAudioMute(boolean state) {
+ if (mHdmiManager == null || mHdmiTvClient == null) return;
+ synchronized (mHdmiManager) {
+ if (!mHdmiSystemAudioSupported) return;
+ synchronized (mHdmiTvClient) {
+ final long token = Binder.clearCallingIdentity();
+ try {
+ mHdmiTvClient.setSystemAudioMute(state);
+ } finally {
+ Binder.restoreCallingIdentity(token);
}
}
- mStreamStates[streamType].mute(cb, state);
}
}
/** get stream mute state. */
public boolean isStreamMute(int streamType) {
- return mStreamStates[streamType].isMuted();
+ if (streamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
+ streamType = getActiveStreamType(streamType);
+ }
+ synchronized (VolumeStreamState.class) {
+ return mStreamStates[streamType].isMuted_syncVSS();
+ }
}
private class RmtSbmxFullVolDeathHandler implements IBinder.DeathRecipient {
@@ -1601,19 +1675,29 @@ public class AudioService extends IAudioService.Stub {
/** @see AudioManager#setMasterMute(boolean, int) */
public void setMasterMute(boolean state, int flags, String callingPackage, IBinder cb) {
+ setMasterMuteInternal(state, flags, callingPackage, cb, Binder.getCallingUid());
+ }
+
+ private void setMasterMuteInternal(boolean state, int flags, String callingPackage, IBinder cb,
+ int uid) {
if (mUseFixedVolume) {
return;
}
- if (mAppOps.noteOp(AppOpsManager.OP_AUDIO_MASTER_VOLUME, Binder.getCallingUid(),
- callingPackage) != AppOpsManager.MODE_ALLOWED) {
+ if (mAppOps.noteOp(AppOpsManager.OP_AUDIO_MASTER_VOLUME, uid, callingPackage)
+ != AppOpsManager.MODE_ALLOWED) {
return;
}
if (state != AudioSystem.getMasterMute()) {
+ setSystemAudioMute(state);
AudioSystem.setMasterMute(state);
// Post a persist master volume msg
sendMsg(mAudioHandler, MSG_PERSIST_MASTER_VOLUME_MUTE, SENDMSG_REPLACE, state ? 1
: 0, UserHandle.getCallingUserId(), null, PERSIST_DELAY);
sendMasterMuteUpdate(state, flags);
+
+ Intent intent = new Intent(AudioManager.MASTER_MUTE_CHANGED_ACTION);
+ intent.putExtra(AudioManager.EXTRA_MASTER_VOLUME_MUTED, state);
+ sendBroadcastToAll(intent);
}
}
@@ -1626,35 +1710,47 @@ public class AudioService extends IAudioService.Stub {
return MAX_STREAM_VOLUME[streamType];
}
+ public static int getDefaultStreamVolume(int streamType) {
+ return DEFAULT_STREAM_VOLUME[streamType];
+ }
+
/** @see AudioManager#getStreamVolume(int) */
public int getStreamVolume(int streamType) {
ensureValidStreamType(streamType);
int device = getDeviceForStream(streamType);
- int index = mStreamStates[streamType].getIndex(device);
+ synchronized (VolumeStreamState.class) {
+ int index = mStreamStates[streamType].getIndex(device);
- // by convention getStreamVolume() returns 0 when a stream is muted.
- if (mStreamStates[streamType].isMuted()) {
- index = 0;
- }
- if (index != 0 && (mStreamVolumeAlias[streamType] == AudioSystem.STREAM_MUSIC) &&
- (device & mFixedVolumeDevices) != 0) {
- index = mStreamStates[streamType].getMaxIndex();
+ // by convention getStreamVolume() returns 0 when a stream is muted.
+ if (mStreamStates[streamType].isMuted_syncVSS()) {
+ index = 0;
+ }
+ if (index != 0 && (mStreamVolumeAlias[streamType] == AudioSystem.STREAM_MUSIC) &&
+ (device & mFixedVolumeDevices) != 0) {
+ index = mStreamStates[streamType].getMaxIndex();
+ }
+ return (index + 5) / 10;
}
- return (index + 5) / 10;
}
+ @Override
public int getMasterVolume() {
if (isMasterMute()) return 0;
return getLastAudibleMasterVolume();
}
+ @Override
public void setMasterVolume(int volume, int flags, String callingPackage) {
+ setMasterVolume(volume, flags, callingPackage, Binder.getCallingUid());
+ }
+
+ public void setMasterVolume(int volume, int flags, String callingPackage, int uid) {
if (mUseFixedVolume) {
return;
}
- if (mAppOps.noteOp(AppOpsManager.OP_AUDIO_MASTER_VOLUME, Binder.getCallingUid(),
- callingPackage) != AppOpsManager.MODE_ALLOWED) {
+ if (mAppOps.noteOp(AppOpsManager.OP_AUDIO_MASTER_VOLUME, uid, callingPackage)
+ != AppOpsManager.MODE_ALLOWED) {
return;
}
@@ -1677,6 +1773,7 @@ public class AudioService extends IAudioService.Stub {
// Post a persist master volume msg
sendMsg(mAudioHandler, MSG_PERSIST_MASTER_VOLUME, SENDMSG_REPLACE,
Math.round(volume * (float)1000.0), 0, null, PERSIST_DELAY);
+ setSystemAudioVolume(oldVolume, newVolume, getMasterMaxVolume(), flags);
}
// Send the volume update regardless whether there was a change.
sendMasterVolumeUpdate(flags, oldVolume, newVolume);
@@ -1726,53 +1823,94 @@ public class AudioService extends IAudioService.Stub {
: 0, UserHandle.getCallingUserId(), null, PERSIST_DELAY);
}
- /** @see AudioManager#getRingerMode() */
- public int getRingerMode() {
+ @Override
+ public int getRingerModeExternal() {
+ synchronized(mSettingsLock) {
+ return mRingerModeExternal;
+ }
+ }
+
+ @Override
+ public int getRingerModeInternal() {
synchronized(mSettingsLock) {
return mRingerMode;
}
}
private void ensureValidRingerMode(int ringerMode) {
- if (!AudioManager.isValidRingerMode(ringerMode)) {
+ if (!isValidRingerMode(ringerMode)) {
throw new IllegalArgumentException("Bad ringer mode " + ringerMode);
}
}
- /** @see AudioManager#setRingerMode(int) */
- public void setRingerMode(int ringerMode, boolean checkZen) {
+ /** @see AudioManager#isValidRingerMode(int) */
+ public boolean isValidRingerMode(int ringerMode) {
+ return ringerMode >= 0 && ringerMode <= AudioManager.RINGER_MODE_MAX;
+ }
+
+ public void setRingerModeExternal(int ringerMode, String caller) {
+ setRingerMode(ringerMode, caller, true /*external*/);
+ }
+
+ public void setRingerModeInternal(int ringerMode, String caller) {
+ enforceSelfOrSystemUI("setRingerModeInternal");
+ setRingerMode(ringerMode, caller, false /*external*/);
+ }
+
+ private void setRingerMode(int ringerMode, String caller, boolean external) {
if (mUseFixedVolume || isPlatformTelevision()) {
return;
}
-
+ if (caller == null || caller.length() == 0) {
+ throw new IllegalArgumentException("Bad caller: " + caller);
+ }
+ ensureValidRingerMode(ringerMode);
if ((ringerMode == AudioManager.RINGER_MODE_VIBRATE) && !mHasVibrator) {
ringerMode = AudioManager.RINGER_MODE_SILENT;
}
- if (checkZen) {
- checkZen(ringerMode);
- }
- if (ringerMode != getRingerMode()) {
- setRingerModeInt(ringerMode, true);
- // Send sticky broadcast
- broadcastRingerMode(ringerMode);
+ final long identity = Binder.clearCallingIdentity();
+ try {
+ synchronized (mSettingsLock) {
+ final int ringerModeInternal = getRingerModeInternal();
+ final int ringerModeExternal = getRingerModeExternal();
+ if (external) {
+ setRingerModeExt(ringerMode);
+ if (mRingerModeDelegate != null) {
+ ringerMode = mRingerModeDelegate.onSetRingerModeExternal(ringerModeExternal,
+ ringerMode, caller, ringerModeInternal);
+ }
+ if (ringerMode != ringerModeInternal) {
+ setRingerModeInt(ringerMode, true /*persist*/);
+ }
+ } else /*internal*/ {
+ if (ringerMode != ringerModeInternal) {
+ setRingerModeInt(ringerMode, true /*persist*/);
+ }
+ if (mRingerModeDelegate != null) {
+ ringerMode = mRingerModeDelegate.onSetRingerModeInternal(ringerModeInternal,
+ ringerMode, caller, ringerModeExternal);
+ }
+ setRingerModeExt(ringerMode);
+ }
+ }
+ } finally {
+ Binder.restoreCallingIdentity(identity);
}
}
- private void checkZen(int ringerMode) {
- // leave zen when callers set ringer-mode = normal or vibrate
- final int zen = Global.getInt(mContentResolver, Global.ZEN_MODE, Global.ZEN_MODE_OFF);
- if (ringerMode != AudioManager.RINGER_MODE_SILENT && zen != Global.ZEN_MODE_OFF) {
- final long ident = Binder.clearCallingIdentity();
- try {
- Global.putInt(mContentResolver, Global.ZEN_MODE, Global.ZEN_MODE_OFF);
- } finally {
- Binder.restoreCallingIdentity(ident);
- }
+ private void setRingerModeExt(int ringerMode) {
+ synchronized(mSettingsLock) {
+ if (ringerMode == mRingerModeExternal) return;
+ mRingerModeExternal = ringerMode;
}
+ // Send sticky broadcast
+ broadcastRingerMode(AudioManager.RINGER_MODE_CHANGED_ACTION, ringerMode);
}
private void setRingerModeInt(int ringerMode, boolean persist) {
+ final boolean change;
synchronized(mSettingsLock) {
+ change = mRingerMode != ringerMode;
mRingerMode = ringerMode;
}
@@ -1781,34 +1919,35 @@ public class AudioService extends IAudioService.Stub {
// Unmute stream if previously muted by ringer mode and ringer mode
// is RINGER_MODE_NORMAL or stream is not affected by ringer mode.
int numStreamTypes = AudioSystem.getNumStreamTypes();
+ final boolean ringerModeMute = ringerMode == AudioManager.RINGER_MODE_VIBRATE
+ || ringerMode == AudioManager.RINGER_MODE_SILENT;
for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
- if (isStreamMutedByRingerMode(streamType)) {
- if (!isStreamAffectedByRingerMode(streamType) ||
- ringerMode == AudioManager.RINGER_MODE_NORMAL) {
- // ring and notifications volume should never be 0 when not silenced
- // on voice capable devices
- if (isPlatformVoice() &&
- mStreamVolumeAlias[streamType] == AudioSystem.STREAM_RING) {
- synchronized (mStreamStates[streamType]) {
- Set set = mStreamStates[streamType].mIndex.entrySet();
- Iterator i = set.iterator();
- while (i.hasNext()) {
- Map.Entry entry = (Map.Entry)i.next();
- if ((Integer)entry.getValue() == 0) {
- entry.setValue(10);
- }
+ final boolean isMuted = isStreamMutedByRingerMode(streamType);
+ final boolean shouldMute = ringerModeMute && isStreamAffectedByRingerMode(streamType);
+ if (isMuted == shouldMute) continue;
+ if (!shouldMute) {
+ // unmute
+ // ring and notifications volume should never be 0 when not silenced
+ // on voice capable devices or devices that support vibration
+ if ((isPlatformVoice() || mHasVibrator) &&
+ mStreamVolumeAlias[streamType] == AudioSystem.STREAM_RING) {
+ synchronized (VolumeStreamState.class) {
+ Set set = mStreamStates[streamType].mIndex.entrySet();
+ Iterator i = set.iterator();
+ while (i.hasNext()) {
+ Map.Entry entry = (Map.Entry)i.next();
+ if ((Integer)entry.getValue() == 0) {
+ entry.setValue(10);
}
}
}
- mStreamStates[streamType].mute(null, false);
- mRingerModeMutedStreams &= ~(1 << streamType);
}
+ mStreamStates[streamType].mute(null, false);
+ mRingerModeMutedStreams &= ~(1 << streamType);
} else {
- if (isStreamAffectedByRingerMode(streamType) &&
- ringerMode != AudioManager.RINGER_MODE_NORMAL) {
- mStreamStates[streamType].mute(null, true);
- mRingerModeMutedStreams |= (1 << streamType);
- }
+ // mute
+ mStreamStates[streamType].mute(null, true);
+ mRingerModeMutedStreams |= (1 << streamType);
}
}
@@ -1817,6 +1956,10 @@ public class AudioService extends IAudioService.Stub {
sendMsg(mAudioHandler, MSG_PERSIST_RINGER_MODE,
SENDMSG_REPLACE, 0, 0, null, PERSIST_DELAY);
}
+ if (change) {
+ // Send sticky broadcast
+ broadcastRingerMode(AudioManager.INTERNAL_RINGER_MODE_CHANGED_ACTION, ringerMode);
+ }
}
private void restoreMasterVolume() {
@@ -1840,10 +1983,10 @@ public class AudioService extends IAudioService.Stub {
switch (getVibrateSetting(vibrateType)) {
case AudioManager.VIBRATE_SETTING_ON:
- return getRingerMode() != AudioManager.RINGER_MODE_SILENT;
+ return getRingerModeExternal() != AudioManager.RINGER_MODE_SILENT;
case AudioManager.VIBRATE_SETTING_ONLY_SILENT:
- return getRingerMode() == AudioManager.RINGER_MODE_VIBRATE;
+ return getRingerModeExternal() == AudioManager.RINGER_MODE_VIBRATE;
case AudioManager.VIBRATE_SETTING_OFF:
// return false, even for incoming calls
@@ -2288,16 +2431,15 @@ public class AudioService extends IAudioService.Stub {
continue;
}
- synchronized (streamState) {
- streamState.readSettings();
-
+ streamState.readSettings();
+ synchronized (VolumeStreamState.class) {
// unmute stream that was muted but is not affect by mute anymore
- if (streamState.isMuted() && ((!isStreamAffectedByMute(streamType) &&
+ if (streamState.isMuted_syncVSS() && ((!isStreamAffectedByMute(streamType) &&
!isStreamMutedByRingerMode(streamType)) || mUseFixedVolume)) {
int size = streamState.mDeathHandlers.size();
for (int i = 0; i < size; i++) {
streamState.mDeathHandlers.get(i).mMuteCount = 1;
- streamState.mDeathHandlers.get(i).mute(false);
+ streamState.mDeathHandlers.get(i).mute_syncVSS(false);
}
}
}
@@ -2305,7 +2447,7 @@ public class AudioService extends IAudioService.Stub {
// apply new ringer mode before checking volume for alias streams so that streams
// muted by ringer mode have the correct volume
- setRingerModeInt(getRingerMode(), false);
+ setRingerModeInt(getRingerModeInternal(), false);
checkAllFixedVolumeDevices();
checkAllAliasStreamVolumes();
@@ -2546,13 +2688,17 @@ public class AudioService extends IAudioService.Stub {
if (mScoAudioState == SCO_STATE_INACTIVE) {
mScoAudioMode = scoAudioMode;
if (scoAudioMode == SCO_MODE_UNDEFINED) {
- mScoAudioMode = new Integer(Settings.Global.getInt(
- mContentResolver,
- "bluetooth_sco_channel_"+
- mBluetoothHeadsetDevice.getAddress(),
- SCO_MODE_VIRTUAL_CALL));
- if (mScoAudioMode > SCO_MODE_MAX || mScoAudioMode < 0) {
- mScoAudioMode = SCO_MODE_VIRTUAL_CALL;
+ if (mBluetoothHeadsetDevice != null) {
+ mScoAudioMode = new Integer(Settings.Global.getInt(
+ mContentResolver,
+ "bluetooth_sco_channel_"+
+ mBluetoothHeadsetDevice.getAddress(),
+ SCO_MODE_VIRTUAL_CALL));
+ if (mScoAudioMode > SCO_MODE_MAX || mScoAudioMode < 0) {
+ mScoAudioMode = SCO_MODE_VIRTUAL_CALL;
+ }
+ } else {
+ mScoAudioMode = SCO_MODE_RAW;
}
}
if (mBluetoothHeadset != null && mBluetoothHeadsetDevice != null) {
@@ -2952,7 +3098,7 @@ public class AudioService extends IAudioService.Stub {
*/
private int checkForRingerModeChange(int oldIndex, int direction, int step) {
int result = FLAG_ADJUST_VOLUME;
- int ringerMode = getRingerMode();
+ int ringerMode = getRingerModeInternal();
switch (ringerMode) {
case RINGER_MODE_NORMAL:
@@ -2983,9 +3129,12 @@ public class AudioService extends IAudioService.Stub {
break;
}
if ((direction == AudioManager.ADJUST_LOWER)) {
- if (VOLUME_SETS_RINGER_MODE_SILENT
- && mPrevVolDirection != AudioManager.ADJUST_LOWER) {
- ringerMode = RINGER_MODE_SILENT;
+ if (mPrevVolDirection != AudioManager.ADJUST_LOWER) {
+ if (VOLUME_SETS_RINGER_MODE_SILENT) {
+ ringerMode = RINGER_MODE_SILENT;
+ } else {
+ result |= AudioManager.FLAG_SHOW_VIBRATE_HINT;
+ }
}
} else if (direction == AudioManager.ADJUST_RAISE) {
ringerMode = RINGER_MODE_NORMAL;
@@ -3011,7 +3160,7 @@ public class AudioService extends IAudioService.Stub {
break;
}
- setRingerMode(ringerMode, false /*checkZen*/);
+ setRingerMode(ringerMode, TAG + ".checkForRingerModeChange", false /*external*/);
mPrevVolDirection = direction;
@@ -3102,7 +3251,10 @@ public class AudioService extends IAudioService.Stub {
TelecomManager telecomManager =
(TelecomManager) mContext.getSystemService(Context.TELECOM_SERVICE);
+
+ final long ident = Binder.clearCallingIdentity();
IsInCall = telecomManager.isInCall();
+ Binder.restoreCallingIdentity(ident);
return (IsInCall || getMode() == AudioManager.MODE_IN_COMMUNICATION);
}
@@ -3185,9 +3337,9 @@ public class AudioService extends IAudioService.Stub {
return suggestedStreamType;
}
- private void broadcastRingerMode(int ringerMode) {
+ private void broadcastRingerMode(String action, int ringerMode) {
// Send sticky broadcast
- Intent broadcast = new Intent(AudioManager.RINGER_MODE_CHANGED_ACTION);
+ Intent broadcast = new Intent(action);
broadcast.putExtra(AudioManager.EXTRA_RINGER_MODE, ringerMode);
broadcast.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT
| Intent.FLAG_RECEIVER_REPLACE_PENDING);
@@ -3227,8 +3379,15 @@ public class AudioService extends IAudioService.Stub {
} else if (existingMsgPolicy == SENDMSG_NOOP && handler.hasMessages(msg)) {
return;
}
-
- handler.sendMessageDelayed(handler.obtainMessage(msg, arg1, arg2, obj), delay);
+ synchronized (mLastDeviceConnectMsgTime) {
+ long time = SystemClock.uptimeMillis() + delay;
+ handler.sendMessageAtTime(handler.obtainMessage(msg, arg1, arg2, obj), time);
+ if (msg == MSG_SET_WIRED_DEVICE_CONNECTION_STATE ||
+ msg == MSG_SET_A2DP_SRC_CONNECTION_STATE ||
+ msg == MSG_SET_A2DP_SINK_CONNECTION_STATE) {
+ mLastDeviceConnectMsgTime = time;
+ }
+ }
}
boolean checkAudioSettingsPermission(String method) {
@@ -3307,6 +3466,12 @@ public class AudioService extends IAudioService.Stub {
// Inner classes
///////////////////////////////////////////////////////////////////////////
+ // NOTE: Locking order for synchronized objects related to volume or ringer mode management:
+ // 1 mScoclient OR mSafeMediaVolumeState
+ // 2 mSetModeDeathHandlers
+ // 3 mSettingsLock
+ // 4 VolumeStreamState.class
+ // 5 mCameraSoundForced
public class VolumeStreamState {
private final int mStreamType;
@@ -3342,8 +3507,9 @@ public class AudioService extends IAudioService.Stub {
public void readSettings() {
synchronized (VolumeStreamState.class) {
- // force maximum volume on all streams if fixed volume property is set
- if (mUseFixedVolume) {
+ // force maximum volume on all streams if fixed volume property
+ // or master volume property is set
+ if (mUseFixedVolume || mUseMasterVolume) {
mIndex.put(AudioSystem.DEVICE_OUT_DEFAULT, mIndexMax);
return;
}
@@ -3352,7 +3518,7 @@ public class AudioService extends IAudioService.Stub {
// only be stale values
if ((mStreamType == AudioSystem.STREAM_SYSTEM) ||
(mStreamType == AudioSystem.STREAM_SYSTEM_ENFORCED)) {
- int index = 10 * AudioManager.DEFAULT_STREAM_VOLUME[mStreamType];
+ int index = 10 * DEFAULT_STREAM_VOLUME[mStreamType];
synchronized (mCameraSoundForced) {
if (mCameraSoundForced) {
index = mIndexMax;
@@ -3376,7 +3542,7 @@ public class AudioService extends IAudioService.Stub {
// if no volume stored for current stream and device, use default volume if default
// device, continue otherwise
int defaultIndex = (device == AudioSystem.DEVICE_OUT_DEFAULT) ?
- AudioManager.DEFAULT_STREAM_VOLUME[mStreamType] : -1;
+ DEFAULT_STREAM_VOLUME[mStreamType] : -1;
int index = Settings.System.getIntForUser(
mContentResolver, name, defaultIndex, UserHandle.USER_CURRENT);
if (index == -1) {
@@ -3388,9 +3554,10 @@ public class AudioService extends IAudioService.Stub {
}
}
- public void applyDeviceVolume(int device) {
+ // must be called while synchronized VolumeStreamState.class
+ public void applyDeviceVolume_syncVSS(int device) {
int index;
- if (isMuted()) {
+ if (isMuted_syncVSS()) {
index = 0;
} else if (((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0 && mAvrcpAbsVolSupported)
|| ((device & mFullVolumeDevices) != 0)) {
@@ -3406,7 +3573,7 @@ public class AudioService extends IAudioService.Stub {
// apply default volume first: by convention this will reset all
// devices volumes in audio policy manager to the supplied value
int index;
- if (isMuted()) {
+ if (isMuted_syncVSS()) {
index = 0;
} else {
index = (getIndex(AudioSystem.DEVICE_OUT_DEFAULT) + 5)/10;
@@ -3419,7 +3586,7 @@ public class AudioService extends IAudioService.Stub {
Map.Entry entry = (Map.Entry)i.next();
int device = ((Integer)entry.getKey()).intValue();
if (device != AudioSystem.DEVICE_OUT_DEFAULT) {
- if (isMuted()) {
+ if (isMuted_syncVSS()) {
index = 0;
} else if (((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0 &&
mAvrcpAbsVolSupported)
@@ -3531,12 +3698,12 @@ public class AudioService extends IAudioService.Stub {
public void mute(IBinder cb, boolean state) {
synchronized (VolumeStreamState.class) {
- VolumeDeathHandler handler = getDeathHandler(cb, state);
+ VolumeDeathHandler handler = getDeathHandler_syncVSS(cb, state);
if (handler == null) {
Log.e(TAG, "Could not get client death handler for stream: "+mStreamType);
return;
}
- handler.mute(state);
+ handler.mute_syncVSS(state);
}
}
@@ -3558,7 +3725,7 @@ public class AudioService extends IAudioService.Stub {
|| (((device & mFixedVolumeDevices) != 0) && index != 0)) {
entry.setValue(mIndexMax);
}
- applyDeviceVolume(device);
+ applyDeviceVolume_syncVSS(device);
}
}
}
@@ -3567,7 +3734,7 @@ public class AudioService extends IAudioService.Stub {
private int getValidIndex(int index) {
if (index < 0) {
return 0;
- } else if (mUseFixedVolume || index > mIndexMax) {
+ } else if (mUseFixedVolume || mUseMasterVolume || index > mIndexMax) {
return mIndexMax;
}
@@ -3582,8 +3749,8 @@ public class AudioService extends IAudioService.Stub {
mICallback = cb;
}
- // must be called while synchronized on parent VolumeStreamState
- public void mute(boolean state) {
+ // must be called while synchronized VolumeStreamState.class
+ public void mute_syncVSS(boolean state) {
boolean updateVolume = false;
if (state) {
if (mMuteCount == 0) {
@@ -3595,7 +3762,7 @@ public class AudioService extends IAudioService.Stub {
}
VolumeStreamState.this.mDeathHandlers.add(this);
// If the stream is not yet muted by any client, set level to 0
- if (!VolumeStreamState.this.isMuted()) {
+ if (!VolumeStreamState.this.isMuted_syncVSS()) {
updateVolume = true;
}
} catch (RemoteException e) {
@@ -3619,7 +3786,7 @@ public class AudioService extends IAudioService.Stub {
if (mICallback != null) {
mICallback.unlinkToDeath(this, 0);
}
- if (!VolumeStreamState.this.isMuted()) {
+ if (!VolumeStreamState.this.isMuted_syncVSS()) {
updateVolume = true;
}
}
@@ -3637,15 +3804,17 @@ public class AudioService extends IAudioService.Stub {
public void binderDied() {
Log.w(TAG, "Volume service client died for stream: "+mStreamType);
- if (mMuteCount != 0) {
- // Reset all active mute requests from this client.
- mMuteCount = 1;
- mute(false);
+ synchronized (VolumeStreamState.class) {
+ if (mMuteCount != 0) {
+ // Reset all active mute requests from this client.
+ mMuteCount = 1;
+ mute_syncVSS(false);
+ }
}
}
}
- private synchronized int muteCount() {
+ private int muteCount() {
int count = 0;
int size = mDeathHandlers.size();
for (int i = 0; i < size; i++) {
@@ -3654,12 +3823,13 @@ public class AudioService extends IAudioService.Stub {
return count;
}
- private synchronized boolean isMuted() {
+ // must be called while synchronized VolumeStreamState.class
+ private boolean isMuted_syncVSS() {
return muteCount() != 0;
}
- // only called by mute() which is already synchronized
- private VolumeDeathHandler getDeathHandler(IBinder cb, boolean state) {
+ // must be called while synchronized VolumeStreamState.class
+ private VolumeDeathHandler getDeathHandler_syncVSS(IBinder cb, boolean state) {
VolumeDeathHandler handler;
int size = mDeathHandlers.size();
for (int i = 0; i < size; i++) {
@@ -3736,25 +3906,26 @@ public class AudioService extends IAudioService.Stub {
private void setDeviceVolume(VolumeStreamState streamState, int device) {
- // Apply volume
- streamState.applyDeviceVolume(device);
-
- // Apply change to all streams using this one as alias
- int numStreamTypes = AudioSystem.getNumStreamTypes();
- for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
- if (streamType != streamState.mStreamType &&
- mStreamVolumeAlias[streamType] == streamState.mStreamType) {
- // Make sure volume is also maxed out on A2DP device for aliased stream
- // that may have a different device selected
- int streamDevice = getDeviceForStream(streamType);
- if ((device != streamDevice) && mAvrcpAbsVolSupported &&
- ((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0)) {
- mStreamStates[streamType].applyDeviceVolume(device);
+ synchronized (VolumeStreamState.class) {
+ // Apply volume
+ streamState.applyDeviceVolume_syncVSS(device);
+
+ // Apply change to all streams using this one as alias
+ int numStreamTypes = AudioSystem.getNumStreamTypes();
+ for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
+ if (streamType != streamState.mStreamType &&
+ mStreamVolumeAlias[streamType] == streamState.mStreamType) {
+ // Make sure volume is also maxed out on A2DP device for aliased stream
+ // that may have a different device selected
+ int streamDevice = getDeviceForStream(streamType);
+ if ((device != streamDevice) && mAvrcpAbsVolSupported &&
+ ((device & AudioSystem.DEVICE_OUT_ALL_A2DP) != 0)) {
+ mStreamStates[streamType].applyDeviceVolume_syncVSS(device);
+ }
+ mStreamStates[streamType].applyDeviceVolume_syncVSS(streamDevice);
}
- mStreamStates[streamType].applyDeviceVolume(streamDevice);
}
}
-
// Post a persist volume msg
sendMsg(mAudioHandler,
MSG_PERSIST_VOLUME,
@@ -4067,7 +4238,7 @@ public class AudioService extends IAudioService.Stub {
case MSG_PERSIST_RINGER_MODE:
// note that the value persisted is the current ringer mode, not the
// value of ringer mode as of the time the request was made to persist
- persistRingerMode(getRingerMode());
+ persistRingerMode(getRingerModeInternal());
break;
case MSG_MEDIA_SERVER_DIED:
@@ -4119,7 +4290,7 @@ public class AudioService extends IAudioService.Stub {
}
// Restore ringer mode
- setRingerModeInt(getRingerMode(), false);
+ setRingerModeInt(getRingerModeInternal(), false);
// Restore master volume
restoreMasterVolume();
@@ -4150,6 +4321,13 @@ public class AudioService extends IAudioService.Stub {
}
}
}
+
+ synchronized (mAudioPolicies) {
+ for(AudioPolicyProxy policy : mAudioPolicies.values()) {
+ policy.connectMixes();
+ }
+ }
+
// indicate the end of reconfiguration phase to audio HAL
AudioSystem.setParameters("restarting=false");
break;
@@ -4289,7 +4467,7 @@ public class AudioService extends IAudioService.Stub {
* Ensure all stream types that should be affected by ringer mode
* are in the proper state.
*/
- setRingerModeInt(getRingerMode(), false);
+ setRingerModeInt(getRingerModeInternal(), false);
}
readDockAudioSettings(mContentResolver);
}
@@ -4534,7 +4712,12 @@ public class AudioService extends IAudioService.Stub {
if (mAudioHandler.hasMessages(MSG_SET_A2DP_SRC_CONNECTION_STATE) ||
mAudioHandler.hasMessages(MSG_SET_A2DP_SINK_CONNECTION_STATE) ||
mAudioHandler.hasMessages(MSG_SET_WIRED_DEVICE_CONNECTION_STATE)) {
- delay = 1000;
+ synchronized (mLastDeviceConnectMsgTime) {
+ long time = SystemClock.uptimeMillis();
+ if (mLastDeviceConnectMsgTime > time) {
+ delay = (int)(mLastDeviceConnectMsgTime - time);
+ }
+ }
}
return delay;
}
@@ -4565,7 +4748,8 @@ public class AudioService extends IAudioService.Stub {
} else if (device == AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET) {
connType = AudioRoutesInfo.MAIN_DOCK_SPEAKERS;
intent.setAction(AudioManager.ACTION_DIGITAL_AUDIO_DOCK_PLUG);
- } else if (device == AudioSystem.DEVICE_OUT_HDMI) {
+ } else if (device == AudioSystem.DEVICE_OUT_HDMI ||
+ device == AudioSystem.DEVICE_OUT_HDMI_ARC) {
connType = AudioRoutesInfo.MAIN_HDMI;
configureHdmiPlugIntent(intent, state);
}
@@ -4660,7 +4844,8 @@ public class AudioService extends IAudioService.Stub {
for (AudioPort port : ports) {
if (port instanceof AudioDevicePort) {
final AudioDevicePort devicePort = (AudioDevicePort) port;
- if (devicePort.type() == AudioManager.DEVICE_OUT_HDMI) {
+ if (devicePort.type() == AudioManager.DEVICE_OUT_HDMI ||
+ devicePort.type() == AudioManager.DEVICE_OUT_HDMI_ARC) {
// format the list of supported encodings
int[] formats = devicePort.formats();
if (formats.length > 0) {
@@ -4944,14 +5129,34 @@ public class AudioService extends IAudioService.Stub {
//==========================================================================================
// Audio Focus
//==========================================================================================
- public int requestAudioFocus(int mainStreamType, int durationHint, IBinder cb,
- IAudioFocusDispatcher fd, String clientId, String callingPackageName) {
- return mMediaFocusControl.requestAudioFocus(mainStreamType, durationHint, cb, fd,
- clientId, callingPackageName);
+ public int requestAudioFocus(AudioAttributes aa, int durationHint, IBinder cb,
+ IAudioFocusDispatcher fd, String clientId, String callingPackageName, int flags,
+ IAudioPolicyCallback pcb) {
+ // permission checks
+ if ((flags & AudioManager.AUDIOFOCUS_FLAG_LOCK) == AudioManager.AUDIOFOCUS_FLAG_LOCK) {
+ if (mMediaFocusControl.IN_VOICE_COMM_FOCUS_ID.equals(clientId)) {
+ if (PackageManager.PERMISSION_GRANTED != mContext.checkCallingOrSelfPermission(
+ android.Manifest.permission.MODIFY_PHONE_STATE)) {
+ Log.e(TAG, "Invalid permission to (un)lock audio focus", new Exception());
+ return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
+ }
+ } else {
+ // only a registered audio policy can be used to lock focus
+ synchronized (mAudioPolicies) {
+ if (!mAudioPolicies.containsKey(pcb.asBinder())) {
+ Log.e(TAG, "Invalid unregistered AudioPolicy to (un)lock audio focus");
+ return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
+ }
+ }
+ }
+ }
+
+ return mMediaFocusControl.requestAudioFocus(aa, durationHint, cb, fd,
+ clientId, callingPackageName, flags);
}
- public int abandonAudioFocus(IAudioFocusDispatcher fd, String clientId) {
- return mMediaFocusControl.abandonAudioFocus(fd, clientId);
+ public int abandonAudioFocus(IAudioFocusDispatcher fd, String clientId, AudioAttributes aa) {
+ return mMediaFocusControl.abandonAudioFocus(fd, clientId, aa);
}
public void unregisterAudioFocusClient(String clientId) {
@@ -4995,41 +5200,44 @@ public class AudioService extends IAudioService.Stub {
boolean cameraSoundForced = mContext.getResources().getBoolean(
com.android.internal.R.bool.config_camera_sound_forced);
synchronized (mSettingsLock) {
+ boolean cameraSoundForcedChanged = false;
synchronized (mCameraSoundForced) {
if (cameraSoundForced != mCameraSoundForced) {
mCameraSoundForced = cameraSoundForced;
-
- if (!isPlatformTelevision()) {
- VolumeStreamState s = mStreamStates[AudioSystem.STREAM_SYSTEM_ENFORCED];
- if (cameraSoundForced) {
- s.setAllIndexesToMax();
- mRingerModeAffectedStreams &=
- ~(1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
- } else {
- s.setAllIndexes(mStreamStates[AudioSystem.STREAM_SYSTEM]);
- mRingerModeAffectedStreams |=
- (1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
- }
- // take new state into account for streams muted by ringer mode
- setRingerModeInt(getRingerMode(), false);
+ cameraSoundForcedChanged = true;
+ }
+ }
+ if (cameraSoundForcedChanged) {
+ if (!isPlatformTelevision()) {
+ VolumeStreamState s = mStreamStates[AudioSystem.STREAM_SYSTEM_ENFORCED];
+ if (cameraSoundForced) {
+ s.setAllIndexesToMax();
+ mRingerModeAffectedStreams &=
+ ~(1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
+ } else {
+ s.setAllIndexes(mStreamStates[AudioSystem.STREAM_SYSTEM]);
+ mRingerModeAffectedStreams |=
+ (1 << AudioSystem.STREAM_SYSTEM_ENFORCED);
}
-
- sendMsg(mAudioHandler,
- MSG_SET_FORCE_USE,
- SENDMSG_QUEUE,
- AudioSystem.FOR_SYSTEM,
- cameraSoundForced ?
- AudioSystem.FORCE_SYSTEM_ENFORCED : AudioSystem.FORCE_NONE,
- null,
- 0);
-
- sendMsg(mAudioHandler,
- MSG_SET_ALL_VOLUMES,
- SENDMSG_QUEUE,
- 0,
- 0,
- mStreamStates[AudioSystem.STREAM_SYSTEM_ENFORCED], 0);
+ // take new state into account for streams muted by ringer mode
+ setRingerModeInt(getRingerModeInternal(), false);
}
+
+ sendMsg(mAudioHandler,
+ MSG_SET_FORCE_USE,
+ SENDMSG_QUEUE,
+ AudioSystem.FOR_SYSTEM,
+ cameraSoundForced ?
+ AudioSystem.FORCE_SYSTEM_ENFORCED : AudioSystem.FORCE_NONE,
+ null,
+ 0);
+
+ sendMsg(mAudioHandler,
+ MSG_SET_ALL_VOLUMES,
+ SENDMSG_QUEUE,
+ 0,
+ 0,
+ mStreamStates[AudioSystem.STREAM_SYSTEM_ENFORCED], 0);
}
}
mVolumeController.setLayoutDirection(config.getLayoutDirection());
@@ -5248,7 +5456,7 @@ public class AudioService extends IAudioService.Stub {
private boolean mHdmiSystemAudioSupported = false;
// Set only when device is tv.
private HdmiTvClient mHdmiTvClient;
- // true if the device has system feature PackageManager.FEATURE_TELEVISION.
+ // true if the device has system feature PackageManager.FEATURE_LEANBACK.
// cached HdmiControlManager interface
private HdmiControlManager mHdmiManager;
// Set only when device is a set-top box.
@@ -5353,11 +5561,13 @@ public class AudioService extends IAudioService.Stub {
private void dumpRingerMode(PrintWriter pw) {
pw.println("\nRinger mode: ");
- pw.println("- mode: "+RINGER_MODE_NAMES[mRingerMode]);
+ pw.println("- mode (internal) = " + RINGER_MODE_NAMES[mRingerMode]);
+ pw.println("- mode (external) = " + RINGER_MODE_NAMES[mRingerModeExternal]);
pw.print("- ringer mode affected streams = 0x");
pw.println(Integer.toHexString(mRingerModeAffectedStreams));
pw.print("- ringer mode muted streams = 0x");
pw.println(Integer.toHexString(mRingerModeMutedStreams));
+ pw.print("- delegate = "); pw.println(mRingerModeDelegate);
}
@Override
@@ -5379,6 +5589,9 @@ public class AudioService extends IAudioService.Stub {
pw.print(" mPendingVolumeCommand="); pw.println(mPendingVolumeCommand);
pw.print(" mMusicActiveMs="); pw.println(mMusicActiveMs);
pw.print(" mMcc="); pw.println(mMcc);
+ pw.print(" mHasVibrator="); pw.println(mHasVibrator);
+
+ dumpAudioPolicies(pw);
}
private static String safeMediaVolumeStateToString(Integer state) {
@@ -5577,6 +5790,13 @@ public class AudioService extends IAudioService.Stub {
* LocalServices.
*/
final class AudioServiceInternal extends AudioManagerInternal {
+ @Override
+ public void setRingerModeDelegate(RingerModeDelegate delegate) {
+ mRingerModeDelegate = delegate;
+ if (mRingerModeDelegate != null) {
+ setRingerModeInternal(getRingerModeInternal(), TAG + ".setRingerModeDelegate");
+ }
+ }
@Override
public void adjustSuggestedStreamVolumeForUid(int streamType, int direction, int flags,
@@ -5597,66 +5817,190 @@ public class AudioService extends IAudioService.Stub {
String callingPackage, int uid) {
setStreamVolume(streamType, direction, flags, callingPackage, uid);
}
+
+ @Override
+ public void adjustMasterVolumeForUid(int steps, int flags, String callingPackage,
+ int uid) {
+ adjustMasterVolume(steps, flags, callingPackage, uid);
+ }
+
+ @Override
+ public int getRingerModeInternal() {
+ return AudioService.this.getRingerModeInternal();
+ }
+
+ @Override
+ public void setRingerModeInternal(int ringerMode, String caller) {
+ AudioService.this.setRingerModeInternal(ringerMode, caller);
+ }
+
+ @Override
+ public void setMasterMuteForUid(boolean state, int flags, String callingPackage, IBinder cb,
+ int uid) {
+ setMasterMuteInternal(state, flags, callingPackage, cb, uid);
+ }
}
//==========================================================================================
// Audio policy management
//==========================================================================================
- public boolean registerAudioPolicy(AudioPolicyConfig policyConfig, IBinder cb) {
- //Log.v(TAG, "registerAudioPolicy for " + cb + " got policy:" + policyConfig);
+ public String registerAudioPolicy(AudioPolicyConfig policyConfig, IAudioPolicyCallback pcb,
+ boolean hasFocusListener) {
+ if (DEBUG_AP) Log.d(TAG, "registerAudioPolicy for " + pcb.asBinder()
+ + " with config:" + policyConfig);
+ String regId = null;
+ // error handling
boolean hasPermissionForPolicy =
- (PackageManager.PERMISSION_GRANTED == mContext.checkCallingOrSelfPermission(
+ (PackageManager.PERMISSION_GRANTED == mContext.checkCallingPermission(
android.Manifest.permission.MODIFY_AUDIO_ROUTING));
if (!hasPermissionForPolicy) {
Slog.w(TAG, "Can't register audio policy for pid " + Binder.getCallingPid() + " / uid "
+ Binder.getCallingUid() + ", need MODIFY_AUDIO_ROUTING");
- return false;
+ return null;
}
+
synchronized (mAudioPolicies) {
- AudioPolicyProxy app = new AudioPolicyProxy(policyConfig, cb);
try {
- cb.linkToDeath(app, 0/*flags*/);
- mAudioPolicies.put(cb, app);
+ if (mAudioPolicies.containsKey(pcb.asBinder())) {
+ Slog.e(TAG, "Cannot re-register policy");
+ return null;
+ }
+ AudioPolicyProxy app = new AudioPolicyProxy(policyConfig, pcb, hasFocusListener);
+ pcb.asBinder().linkToDeath(app, 0/*flags*/);
+ regId = app.getRegistrationId();
+ mAudioPolicies.put(pcb.asBinder(), app);
} catch (RemoteException e) {
// audio policy owner has already died!
- Slog.w(TAG, "Audio policy registration failed, could not link to " + cb +
+ Slog.w(TAG, "Audio policy registration failed, could not link to " + pcb +
" binder death", e);
- return false;
+ return null;
}
}
- // TODO implement registration with native audio policy (including permission check)
- return true;
+ return regId;
}
- public void unregisterAudioPolicyAsync(IBinder cb) {
+
+ public void unregisterAudioPolicyAsync(IAudioPolicyCallback pcb) {
+ if (DEBUG_AP) Log.d(TAG, "unregisterAudioPolicyAsync for " + pcb.asBinder());
synchronized (mAudioPolicies) {
- AudioPolicyProxy app = mAudioPolicies.remove(cb);
+ AudioPolicyProxy app = mAudioPolicies.remove(pcb.asBinder());
if (app == null) {
Slog.w(TAG, "Trying to unregister unknown audio policy for pid "
+ Binder.getCallingPid() + " / uid " + Binder.getCallingUid());
+ return;
} else {
- cb.unlinkToDeath(app, 0/*flags*/);
+ pcb.asBinder().unlinkToDeath(app, 0/*flags*/);
+ }
+ app.release();
+ }
+ // TODO implement clearing mix attribute matching info in native audio policy
+ }
+
+ public int setFocusPropertiesForPolicy(int duckingBehavior, IAudioPolicyCallback pcb) {
+ if (DEBUG_AP) Log.d(TAG, "setFocusPropertiesForPolicy() duck behavior=" + duckingBehavior
+ + " policy " + pcb.asBinder());
+ // error handling
+ boolean hasPermissionForPolicy =
+ (PackageManager.PERMISSION_GRANTED == mContext.checkCallingPermission(
+ android.Manifest.permission.MODIFY_AUDIO_ROUTING));
+ if (!hasPermissionForPolicy) {
+ Slog.w(TAG, "Cannot change audio policy ducking handling for pid "
+ + Binder.getCallingPid() + " / uid "
+ + Binder.getCallingUid() + ", need MODIFY_AUDIO_ROUTING");
+ return AudioManager.ERROR;
+ }
+
+ synchronized (mAudioPolicies) {
+ if (!mAudioPolicies.containsKey(pcb.asBinder())) {
+ Slog.e(TAG, "Cannot change audio policy focus properties, unregistered policy");
+ return AudioManager.ERROR;
+ }
+ final AudioPolicyProxy app = mAudioPolicies.get(pcb.asBinder());
+ if (duckingBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY) {
+ // is there already one policy managing ducking?
+ for(AudioPolicyProxy policy : mAudioPolicies.values()) {
+ if (policy.mFocusDuckBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY) {
+ Slog.e(TAG, "Cannot change audio policy ducking behavior, already handled");
+ return AudioManager.ERROR;
+ }
+ }
}
+ app.mFocusDuckBehavior = duckingBehavior;
+ mMediaFocusControl.setDuckingInExtPolicyAvailable(
+ duckingBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY);
}
- // TODO implement registration with native audio policy
+ return AudioManager.SUCCESS;
}
- public class AudioPolicyProxy implements IBinder.DeathRecipient {
+ private void dumpAudioPolicies(PrintWriter pw) {
+ pw.println("\nAudio policies:");
+ synchronized (mAudioPolicies) {
+ for(AudioPolicyProxy policy : mAudioPolicies.values()) {
+ pw.println(policy.toLogFriendlyString());
+ }
+ }
+ }
+
+ //======================
+ // Audio policy proxy
+ //======================
+ /**
+ * This internal class inherits from AudioPolicyConfig, each instance contains all the
+ * mixes of an AudioPolicy and their configurations.
+ */
+ public class AudioPolicyProxy extends AudioPolicyConfig implements IBinder.DeathRecipient {
private static final String TAG = "AudioPolicyProxy";
AudioPolicyConfig mConfig;
- IBinder mToken;
- AudioPolicyProxy(AudioPolicyConfig config, IBinder token) {
- mConfig = config;
- mToken = token;
+ IAudioPolicyCallback mPolicyToken;
+ boolean mHasFocusListener;
+ /**
+ * Audio focus ducking behavior for an audio policy.
+ * This variable reflects the value that was successfully set in
+ * {@link AudioService#setFocusPropertiesForPolicy(int, IAudioPolicyCallback)}. This
+ * implies that a value of FOCUS_POLICY_DUCKING_IN_POLICY means the corresponding policy
+ * is handling ducking for audio focus.
+ */
+ int mFocusDuckBehavior = AudioPolicy.FOCUS_POLICY_DUCKING_DEFAULT;
+
+ AudioPolicyProxy(AudioPolicyConfig config, IAudioPolicyCallback token,
+ boolean hasFocusListener) {
+ super(config);
+ setRegistration(config.hashCode() + ":ap:" + mAudioPolicyCounter++);
+ mPolicyToken = token;
+ mHasFocusListener = hasFocusListener;
+ if (mHasFocusListener) {
+ mMediaFocusControl.addFocusFollower(mPolicyToken);
+ }
+ connectMixes();
}
public void binderDied() {
synchronized (mAudioPolicies) {
- Log.v(TAG, "audio policy " + mToken + " died");
- mAudioPolicies.remove(mToken);
+ Log.i(TAG, "audio policy " + mPolicyToken + " died");
+ release();
+ mAudioPolicies.remove(mPolicyToken.asBinder());
}
}
+
+ String getRegistrationId() {
+ return getRegistration();
+ }
+
+ void release() {
+ if (mFocusDuckBehavior == AudioPolicy.FOCUS_POLICY_DUCKING_IN_POLICY) {
+ mMediaFocusControl.setDuckingInExtPolicyAvailable(false);
+ }
+ if (mHasFocusListener) {
+ mMediaFocusControl.removeFocusFollower(mPolicyToken);
+ }
+ AudioSystem.registerPolicyMixes(mMixes, false);
+ }
+
+ void connectMixes() {
+ AudioSystem.registerPolicyMixes(mMixes, true);
+ }
};
private HashMap<IBinder, AudioPolicyProxy> mAudioPolicies =
new HashMap<IBinder, AudioPolicyProxy>();
+ private int mAudioPolicyCounter = 0; // always accessed synchronized on mAudioPolicies
}
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 9a76f94..46ab7e0 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -16,6 +16,7 @@
package android.media;
+import android.media.audiopolicy.AudioMix;
import java.util.ArrayList;
/* IF YOU CHANGE ANY OF THE CONSTANTS IN THIS FILE, DO NOT FORGET
@@ -255,6 +256,7 @@ public class AudioSystem
public static final int DEVICE_OUT_SPDIF = 0x80000;
public static final int DEVICE_OUT_FM = 0x100000;
public static final int DEVICE_OUT_AUX_LINE = 0x200000;
+ public static final int DEVICE_OUT_SPEAKER_SAFE = 0x400000;
public static final int DEVICE_OUT_DEFAULT = DEVICE_BIT_DEFAULT;
@@ -280,6 +282,7 @@ public class AudioSystem
DEVICE_OUT_SPDIF |
DEVICE_OUT_FM |
DEVICE_OUT_AUX_LINE |
+ DEVICE_OUT_SPEAKER_SAFE |
DEVICE_OUT_DEFAULT);
public static final int DEVICE_OUT_ALL_A2DP = (DEVICE_OUT_BLUETOOTH_A2DP |
DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
@@ -372,6 +375,27 @@ public class AudioSystem
public static final String DEVICE_OUT_SPDIF_NAME = "spdif";
public static final String DEVICE_OUT_FM_NAME = "fm_transmitter";
public static final String DEVICE_OUT_AUX_LINE_NAME = "aux_line";
+ public static final String DEVICE_OUT_SPEAKER_SAFE_NAME = "speaker_safe";
+
+ public static final String DEVICE_IN_COMMUNICATION_NAME = "communication";
+ public static final String DEVICE_IN_AMBIENT_NAME = "ambient";
+ public static final String DEVICE_IN_BUILTIN_MIC_NAME = "mic";
+ public static final String DEVICE_IN_BLUETOOTH_SCO_HEADSET_NAME = "bt_sco_hs";
+ public static final String DEVICE_IN_WIRED_HEADSET_NAME = "headset";
+ public static final String DEVICE_IN_AUX_DIGITAL_NAME = "aux_digital";
+ public static final String DEVICE_IN_TELEPHONY_RX_NAME = "telephony_rx";
+ public static final String DEVICE_IN_BACK_MIC_NAME = "back_mic";
+ public static final String DEVICE_IN_REMOTE_SUBMIX_NAME = "remote_submix";
+ public static final String DEVICE_IN_ANLG_DOCK_HEADSET_NAME = "analog_dock";
+ public static final String DEVICE_IN_DGTL_DOCK_HEADSET_NAME = "digital_dock";
+ public static final String DEVICE_IN_USB_ACCESSORY_NAME = "usb_accessory";
+ public static final String DEVICE_IN_USB_DEVICE_NAME = "usb_device";
+ public static final String DEVICE_IN_FM_TUNER_NAME = "fm_tuner";
+ public static final String DEVICE_IN_TV_TUNER_NAME = "tv_tuner";
+ public static final String DEVICE_IN_LINE_NAME = "line";
+ public static final String DEVICE_IN_SPDIF_NAME = "spdif";
+ public static final String DEVICE_IN_BLUETOOTH_A2DP_NAME = "bt_a2dp";
+ public static final String DEVICE_IN_LOOPBACK_NAME = "loopback";
public static String getOutputDeviceName(int device)
{
@@ -420,12 +444,60 @@ public class AudioSystem
return DEVICE_OUT_FM_NAME;
case DEVICE_OUT_AUX_LINE:
return DEVICE_OUT_AUX_LINE_NAME;
+ case DEVICE_OUT_SPEAKER_SAFE:
+ return DEVICE_OUT_SPEAKER_SAFE_NAME;
case DEVICE_OUT_DEFAULT:
default:
- return "";
+ return Integer.toString(device);
}
}
+ public static String getInputDeviceName(int device)
+ {
+ switch(device) {
+ case DEVICE_IN_COMMUNICATION:
+ return DEVICE_IN_COMMUNICATION_NAME;
+ case DEVICE_IN_AMBIENT:
+ return DEVICE_IN_AMBIENT_NAME;
+ case DEVICE_IN_BUILTIN_MIC:
+ return DEVICE_IN_BUILTIN_MIC_NAME;
+ case DEVICE_IN_BLUETOOTH_SCO_HEADSET:
+ return DEVICE_IN_BLUETOOTH_SCO_HEADSET_NAME;
+ case DEVICE_IN_WIRED_HEADSET:
+ return DEVICE_IN_WIRED_HEADSET_NAME;
+ case DEVICE_IN_AUX_DIGITAL:
+ return DEVICE_IN_AUX_DIGITAL_NAME;
+ case DEVICE_IN_TELEPHONY_RX:
+ return DEVICE_IN_TELEPHONY_RX_NAME;
+ case DEVICE_IN_BACK_MIC:
+ return DEVICE_IN_BACK_MIC_NAME;
+ case DEVICE_IN_REMOTE_SUBMIX:
+ return DEVICE_IN_REMOTE_SUBMIX_NAME;
+ case DEVICE_IN_ANLG_DOCK_HEADSET:
+ return DEVICE_IN_ANLG_DOCK_HEADSET_NAME;
+ case DEVICE_IN_DGTL_DOCK_HEADSET:
+ return DEVICE_IN_DGTL_DOCK_HEADSET_NAME;
+ case DEVICE_IN_USB_ACCESSORY:
+ return DEVICE_IN_USB_ACCESSORY_NAME;
+ case DEVICE_IN_USB_DEVICE:
+ return DEVICE_IN_USB_DEVICE_NAME;
+ case DEVICE_IN_FM_TUNER:
+ return DEVICE_IN_FM_TUNER_NAME;
+ case DEVICE_IN_TV_TUNER:
+ return DEVICE_IN_TV_TUNER_NAME;
+ case DEVICE_IN_LINE:
+ return DEVICE_IN_LINE_NAME;
+ case DEVICE_IN_SPDIF:
+ return DEVICE_IN_SPDIF_NAME;
+ case DEVICE_IN_BLUETOOTH_A2DP:
+ return DEVICE_IN_BLUETOOTH_A2DP_NAME;
+ case DEVICE_IN_LOOPBACK:
+ return DEVICE_IN_LOOPBACK_NAME;
+ case DEVICE_IN_DEFAULT:
+ default:
+ return Integer.toString(device);
+ }
+ }
// phone state, match audio_mode???
public static final int PHONE_STATE_OFFCALL = 0;
@@ -495,5 +567,7 @@ public class AudioSystem
public static final int AUDIO_HW_SYNC_INVALID = 0;
public static native int getAudioHwSyncForSession(int sessionId);
+
+ public static native int registerPolicyMixes(ArrayList<AudioMix> mixes, boolean register);
}
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index f37cbe5..547d87e 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -1070,7 +1070,7 @@ public class AudioTrack
* {@link #ERROR_INVALID_OPERATION}
*/
public int setPlaybackHeadPosition(int positionInFrames) {
- if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
+ if (mDataLoadMode == MODE_STREAM || mState == STATE_UNINITIALIZED ||
getPlayState() == PLAYSTATE_PLAYING) {
return ERROR_INVALID_OPERATION;
}
@@ -1100,7 +1100,7 @@ public class AudioTrack
* {@link #ERROR_INVALID_OPERATION}
*/
public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
- if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
+ if (mDataLoadMode == MODE_STREAM || mState == STATE_UNINITIALIZED ||
getPlayState() == PLAYSTATE_PLAYING) {
return ERROR_INVALID_OPERATION;
}
diff --git a/media/java/android/media/FocusRequester.java b/media/java/android/media/FocusRequester.java
index 9a39994..bbe5fd2 100644
--- a/media/java/android/media/FocusRequester.java
+++ b/media/java/android/media/FocusRequester.java
@@ -16,6 +16,7 @@
package android.media;
+import android.annotation.NonNull;
import android.media.MediaFocusControl.AudioFocusDeathHandler;
import android.os.IBinder;
import android.util.Log;
@@ -40,24 +41,43 @@ class FocusRequester {
private final String mClientId;
private final String mPackageName;
private final int mCallingUid;
+ private final MediaFocusControl mFocusController; // never null
/**
* the audio focus gain request that caused the addition of this object in the focus stack.
*/
private final int mFocusGainRequest;
/**
+ * the flags associated with the gain request that qualify the type of grant (e.g. accepting
+ * delay vs grant must be immediate)
+ */
+ private final int mGrantFlags;
+ /**
* the audio focus loss received my mFocusDispatcher, is AudioManager.AUDIOFOCUS_NONE if
* it never lost focus.
*/
private int mFocusLossReceived;
/**
- * the stream type associated with the focus request
+ * the audio attributes associated with the focus request
*/
- private final int mStreamType;
+ private final AudioAttributes mAttributes;
- FocusRequester(int streamType, int focusRequest,
+ /**
+ * Class constructor
+ * @param aa
+ * @param focusRequest
+ * @param grantFlags
+ * @param afl
+ * @param source
+ * @param id
+ * @param hdlr
+ * @param pn
+ * @param uid
+ * @param ctlr cannot be null
+ */
+ FocusRequester(AudioAttributes aa, int focusRequest, int grantFlags,
IAudioFocusDispatcher afl, IBinder source, String id, AudioFocusDeathHandler hdlr,
- String pn, int uid) {
- mStreamType = streamType;
+ String pn, int uid, @NonNull MediaFocusControl ctlr) {
+ mAttributes = aa;
mFocusDispatcher = afl;
mSourceRef = source;
mClientId = id;
@@ -65,7 +85,9 @@ class FocusRequester {
mPackageName = pn;
mCallingUid = uid;
mFocusGainRequest = focusRequest;
+ mGrantFlags = grantFlags;
mFocusLossReceived = AudioManager.AUDIOFOCUS_NONE;
+ mFocusController = ctlr;
}
@@ -77,6 +99,10 @@ class FocusRequester {
}
}
+ boolean isLockedFocusOwner() {
+ return ((mGrantFlags & AudioManager.AUDIOFOCUS_FLAG_LOCK) != 0);
+ }
+
boolean hasSameBinder(IBinder ib) {
return (mSourceRef != null) && mSourceRef.equals(ib);
}
@@ -93,13 +119,20 @@ class FocusRequester {
return mCallingUid == uid;
}
+ String getClientId() {
+ return mClientId;
+ }
int getGainRequest() {
return mFocusGainRequest;
}
- int getStreamType() {
- return mStreamType;
+ int getGrantFlags() {
+ return mGrantFlags;
+ }
+
+ AudioAttributes getAudioAttributes() {
+ return mAttributes;
}
@@ -134,14 +167,31 @@ class FocusRequester {
return focusChangeToString(mFocusLossReceived);
}
+ private static String flagsToString(int flags) {
+ String msg = "";
+ if ((flags & AudioManager.AUDIOFOCUS_FLAG_DELAY_OK) != 0) {
+ msg += "DELAY_OK";
+ }
+ if ((flags & AudioManager.AUDIOFOCUS_FLAG_LOCK) != 0) {
+ if (!msg.isEmpty()) { msg += "|"; }
+ msg += "LOCK";
+ }
+ if ((flags & AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS) != 0) {
+ if (!msg.isEmpty()) { msg += "|"; }
+ msg += "PAUSES_ON_DUCKABLE_LOSS";
+ }
+ return msg;
+ }
+
void dump(PrintWriter pw) {
pw.println(" source:" + mSourceRef
+ " -- pack: " + mPackageName
+ " -- client: " + mClientId
+ " -- gain: " + focusGainToString()
+ + " -- flags: " + flagsToString(mGrantFlags)
+ " -- loss: " + focusLossToString()
+ " -- uid: " + mCallingUid
- + " -- stream: " + mStreamType);
+ + " -- attr: " + mAttributes);
}
@@ -204,13 +254,22 @@ class FocusRequester {
}
}
+ /**
+ * Called synchronized on MediaFocusControl.mAudioFocusLock
+ */
void handleExternalFocusGain(int focusGain) {
int focusLoss = focusLossForGainRequest(focusGain);
handleFocusLoss(focusLoss);
}
+ /**
+ * Called synchronized on MediaFocusControl.mAudioFocusLock
+ */
void handleFocusGain(int focusGain) {
try {
+ mFocusLossReceived = AudioManager.AUDIOFOCUS_NONE;
+ mFocusController.notifyExtPolicyFocusGrant_syncAf(toAudioFocusInfo(),
+ AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
if (mFocusDispatcher != null) {
if (DEBUG) {
Log.v(TAG, "dispatching " + focusChangeToString(focusGain) + " to "
@@ -218,27 +277,52 @@ class FocusRequester {
}
mFocusDispatcher.dispatchAudioFocusChange(focusGain, mClientId);
}
- mFocusLossReceived = AudioManager.AUDIOFOCUS_NONE;
} catch (android.os.RemoteException e) {
Log.e(TAG, "Failure to signal gain of audio focus due to: ", e);
}
}
+ /**
+ * Called synchronized on MediaFocusControl.mAudioFocusLock
+ */
void handleFocusLoss(int focusLoss) {
try {
if (focusLoss != mFocusLossReceived) {
+ mFocusLossReceived = focusLoss;
+ // before dispatching a focus loss, check if the following conditions are met:
+ // 1/ the framework is not supposed to notify the focus loser on a DUCK loss
+ // 2/ it is a DUCK loss
+ // 3/ the focus loser isn't flagged as pausing in a DUCK loss
+ // if they are, do not notify the focus loser
+ if (!mFocusController.mustNotifyFocusOwnerOnDuck()
+ && mFocusLossReceived == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK
+ && (mGrantFlags
+ & AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS) == 0) {
+ if (DEBUG) {
+ Log.v(TAG, "NOT dispatching " + focusChangeToString(mFocusLossReceived)
+ + " to " + mClientId + ", to be handled externally");
+ }
+ mFocusController.notifyExtPolicyFocusLoss_syncAf(
+ toAudioFocusInfo(), false /* wasDispatched */);
+ return;
+ }
if (mFocusDispatcher != null) {
if (DEBUG) {
- Log.v(TAG, "dispatching " + focusChangeToString(focusLoss) + " to "
+ Log.v(TAG, "dispatching " + focusChangeToString(mFocusLossReceived) + " to "
+ mClientId);
}
- mFocusDispatcher.dispatchAudioFocusChange(focusLoss, mClientId);
+ mFocusController.notifyExtPolicyFocusLoss_syncAf(
+ toAudioFocusInfo(), true /* wasDispatched */);
+ mFocusDispatcher.dispatchAudioFocusChange(mFocusLossReceived, mClientId);
}
- mFocusLossReceived = focusLoss;
}
} catch (android.os.RemoteException e) {
Log.e(TAG, "Failure to signal loss of audio focus due to:", e);
}
}
+ AudioFocusInfo toAudioFocusInfo() {
+ return new AudioFocusInfo(mAttributes, mClientId, mPackageName,
+ mFocusGainRequest, mFocusLossReceived, mGrantFlags);
+ }
}
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index 39b074e..fad3cec 100644
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -19,6 +19,7 @@ package android.media;
import android.app.PendingIntent;
import android.bluetooth.BluetoothDevice;
import android.content.ComponentName;
+import android.media.AudioAttributes;
import android.media.AudioRoutesInfo;
import android.media.IAudioFocusDispatcher;
import android.media.IAudioRoutesObserver;
@@ -29,6 +30,7 @@ import android.media.IRingtonePlayer;
import android.media.IVolumeController;
import android.media.Rating;
import android.media.audiopolicy.AudioPolicyConfig;
+import android.media.audiopolicy.IAudioPolicyCallback;
import android.net.Uri;
import android.view.KeyEvent;
@@ -76,9 +78,15 @@ interface IAudioService {
void setMicrophoneMute(boolean on, String callingPackage);
- void setRingerMode(int ringerMode, boolean checkZen);
+ void setRingerModeExternal(int ringerMode, String caller);
- int getRingerMode();
+ void setRingerModeInternal(int ringerMode, String caller);
+
+ int getRingerModeExternal();
+
+ int getRingerModeInternal();
+
+ boolean isValidRingerMode(int ringerMode);
void setVibrateSetting(int vibrateType, int vibrateSetting);
@@ -114,10 +122,11 @@ interface IAudioService {
boolean isBluetoothA2dpOn();
- int requestAudioFocus(int mainStreamType, int durationHint, IBinder cb,
- IAudioFocusDispatcher fd, String clientId, String callingPackageName);
+ int requestAudioFocus(in AudioAttributes aa, int durationHint, IBinder cb,
+ IAudioFocusDispatcher fd, String clientId, String callingPackageName, int flags,
+ IAudioPolicyCallback pcb);
- int abandonAudioFocus(IAudioFocusDispatcher fd, String clientId);
+ int abandonAudioFocus(IAudioFocusDispatcher fd, String clientId, in AudioAttributes aa);
void unregisterAudioFocusClient(String clientId);
@@ -205,6 +214,9 @@ interface IAudioService {
boolean isHdmiSystemAudioSupported();
- boolean registerAudioPolicy(in AudioPolicyConfig policyConfig, IBinder cb);
- oneway void unregisterAudioPolicyAsync(in IBinder cb);
+ String registerAudioPolicy(in AudioPolicyConfig policyConfig,
+ in IAudioPolicyCallback pcb, boolean hasFocusListener);
+ oneway void unregisterAudioPolicyAsync(in IAudioPolicyCallback pcb);
+
+ int setFocusPropertiesForPolicy(int duckingBehavior, in IAudioPolicyCallback pcb);
}
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index b541454..8d6a588 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -26,6 +26,7 @@ import android.view.Surface;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
+import java.nio.NioUtils;
/**
* <p>The ImageReader class allows direct application access to image data
@@ -577,7 +578,11 @@ public class ImageReader implements AutoCloseable {
@Override
public int getWidth() {
if (mIsImageValid) {
- return ImageReader.this.mWidth;
+ if (mWidth == -1) {
+ mWidth = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getWidth() :
+ nativeGetWidth();
+ }
+ return mWidth;
} else {
throw new IllegalStateException("Image is already released");
}
@@ -586,7 +591,11 @@ public class ImageReader implements AutoCloseable {
@Override
public int getHeight() {
if (mIsImageValid) {
- return ImageReader.this.mHeight;
+ if (mHeight == -1) {
+ mHeight = (getFormat() == ImageFormat.JPEG) ? ImageReader.this.getHeight() :
+ nativeGetHeight();
+ }
+ return mHeight;
} else {
throw new IllegalStateException("Image is already released");
}
@@ -688,6 +697,15 @@ public class ImageReader implements AutoCloseable {
}
private void clearBuffer() {
+ // Need null check first, as the getBuffer() may not be called before an image
+ // is closed.
+ if (mBuffer == null) {
+ return;
+ }
+
+ if (mBuffer.isDirect()) {
+ NioUtils.freeDirectBuffer(mBuffer);
+ }
mBuffer = null;
}
@@ -711,9 +729,13 @@ public class ImageReader implements AutoCloseable {
private SurfacePlane[] mPlanes;
private boolean mIsImageValid;
+ private int mHeight = -1;
+ private int mWidth = -1;
private synchronized native ByteBuffer nativeImageGetBuffer(int idx, int readerFormat);
private synchronized native SurfacePlane nativeCreatePlane(int idx, int readerFormat);
+ private synchronized native int nativeGetWidth();
+ private synchronized native int nativeGetHeight();
}
private synchronized native void nativeInit(Object weakSelf, int w, int h,
diff --git a/media/java/android/media/MediaCodecInfo.java b/media/java/android/media/MediaCodecInfo.java
index 4513643..6984575 100644
--- a/media/java/android/media/MediaCodecInfo.java
+++ b/media/java/android/media/MediaCodecInfo.java
@@ -126,6 +126,7 @@ public final class MediaCodecInfo {
new Rational(Integer.MAX_VALUE, 1));
private static final Range<Integer> SIZE_RANGE = Range.create(1, 32768);
private static final Range<Integer> FRAME_RATE_RANGE = Range.create(0, 960);
+ private static final Range<Integer> BITRATE_RANGE = Range.create(0, 500000000);
// found stuff that is not supported by framework (=> this should not happen)
private static final int ERROR_UNRECOGNIZED = (1 << 0);
@@ -711,7 +712,7 @@ public final class MediaCodecInfo {
}
if (info.containsKey("bitrate-range")) {
bitRates = bitRates.intersect(
- Utils.parseIntRange(info.getString("bitrate"), bitRates));
+ Utils.parseIntRange(info.getString("bitrate-range"), bitRates));
}
applyLimits(maxInputChannels, bitRates);
}
@@ -1061,7 +1062,7 @@ public final class MediaCodecInfo {
}
private void initWithPlatformLimits() {
- mBitrateRange = Range.create(0, Integer.MAX_VALUE);
+ mBitrateRange = BITRATE_RANGE;
mWidthRange = SIZE_RANGE;
mHeightRange = SIZE_RANGE;
@@ -1090,7 +1091,7 @@ public final class MediaCodecInfo {
Size blockSize = new Size(mBlockWidth, mBlockHeight);
Size alignment = new Size(mWidthAlignment, mHeightAlignment);
Range<Integer> counts = null, widths = null, heights = null;
- Range<Integer> frameRates = null;
+ Range<Integer> frameRates = null, bitRates = null;
Range<Long> blockRates = null;
Range<Rational> ratios = null, blockRatios = null;
@@ -1148,6 +1149,16 @@ public final class MediaCodecInfo {
frameRates = null;
}
}
+ bitRates = Utils.parseIntRange(map.get("bitrate-range"), null);
+ if (bitRates != null) {
+ try {
+ bitRates = bitRates.intersect(BITRATE_RANGE);
+ } catch (IllegalArgumentException e) {
+ Log.w(TAG, "bitrate range (" + bitRates
+ + ") is out of limits: " + BITRATE_RANGE);
+ bitRates = null;
+ }
+ }
checkPowerOfTwo(
blockSize.getWidth(), "block-size width must be power of two");
@@ -1196,6 +1207,9 @@ public final class MediaCodecInfo {
if (frameRates != null) {
mFrameRateRange = FRAME_RATE_RANGE.intersect(frameRates);
}
+ if (bitRates != null) {
+ mBitrateRange = BITRATE_RANGE.intersect(bitRates);
+ }
} else {
// no unsupported profile/levels, so restrict values to known limits
if (widths != null) {
@@ -1226,6 +1240,9 @@ public final class MediaCodecInfo {
if (frameRates != null) {
mFrameRateRange = mFrameRateRange.intersect(frameRates);
}
+ if (bitRates != null) {
+ mBitrateRange = mBitrateRange.intersect(bitRates);
+ }
}
updateLimits();
}
diff --git a/media/java/android/media/MediaDrm.java b/media/java/android/media/MediaDrm.java
index 3b8cb19..78a5abe 100644
--- a/media/java/android/media/MediaDrm.java
+++ b/media/java/android/media/MediaDrm.java
@@ -572,6 +572,12 @@ public final class MediaDrm {
*/
public native List<byte[]> getSecureStops();
+ /**
+ * Access secure stop by secure stop ID.
+ *
+ * @param ssid - The secure stop ID provided by the license server.
+ */
+ public native byte[] getSecureStop(byte[] ssid);
/**
* Process the SecureStop server response message ssRelease. After authenticating
@@ -581,6 +587,10 @@ public final class MediaDrm {
*/
public native void releaseSecureStops(byte[] ssRelease);
+ /**
+ * Remove all secure stops without requiring interaction with the server.
+ */
+ public native void releaseAllSecureStops();
/**
* String property name: identifies the maker of the DRM engine plugin
diff --git a/media/java/android/media/MediaFocusControl.java b/media/java/android/media/MediaFocusControl.java
index c67e397..6518bd1 100644
--- a/media/java/android/media/MediaFocusControl.java
+++ b/media/java/android/media/MediaFocusControl.java
@@ -33,6 +33,7 @@ import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.database.ContentObserver;
import android.media.PlayerRecord.RemotePlaybackState;
+import android.media.audiopolicy.IAudioPolicyCallback;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
@@ -390,7 +391,8 @@ public class MediaFocusControl implements OnFinished {
// AudioFocus
//==========================================================================================
- /* constant to identify focus stack entry that is used to hold the focus while the phone
+ /**
+ * Constant to identify a focus stack entry that is used to hold the focus while the phone
* is ringing or during a call. Used by com.android.internal.telephony.CallManager when
* entering and exiting calls.
*/
@@ -433,6 +435,9 @@ public class MediaFocusControl implements OnFinished {
}
}
+ /**
+ * Called synchronized on mAudioFocusLock
+ */
private void notifyTopOfAudioFocusStack() {
// notify the top of the stack it gained focus
if (!mFocusStack.empty()) {
@@ -469,6 +474,7 @@ public class MediaFocusControl implements OnFinished {
stackIterator.next().dump(pw);
}
}
+ pw.println("\n Notify on duck: " + mNotifyFocusOwnerOnDuck + "\n");
}
/**
@@ -479,13 +485,19 @@ public class MediaFocusControl implements OnFinished {
* @param signal if true and the listener was at the top of the focus stack, i.e. it was holding
* focus, notify the next item in the stack it gained focus.
*/
- private void removeFocusStackEntry(String clientToRemove, boolean signal) {
+ private void removeFocusStackEntry(String clientToRemove, boolean signal,
+ boolean notifyFocusFollowers) {
// is the current top of the focus stack abandoning focus? (because of request, not death)
if (!mFocusStack.empty() && mFocusStack.peek().hasSameClient(clientToRemove))
{
//Log.i(TAG, " removeFocusStackEntry() removing top of stack");
FocusRequester fr = mFocusStack.pop();
fr.release();
+ if (notifyFocusFollowers) {
+ final AudioFocusInfo afi = fr.toAudioFocusInfo();
+ afi.clearLossReceived();
+ notifyExtPolicyFocusLoss_syncAf(afi, false);
+ }
if (signal) {
// notify the new top of the stack it gained focus
notifyTopOfAudioFocusStack();
@@ -538,16 +550,54 @@ public class MediaFocusControl implements OnFinished {
/**
* Helper function:
* Returns true if the system is in a state where the focus can be reevaluated, false otherwise.
+ * The implementation guarantees that a state where focus cannot be immediately reassigned
+ * implies that a "locked" focus owner is at the top of the focus stack
+ * Modifications to the implementation that break this assumption will cause focus requests to
+ * misbehave when honoring the AudioManager.AUDIOFOCUS_FLAG_DELAY_OK flag.
*/
private boolean canReassignAudioFocus() {
// focus requests are rejected during a phone call or when the phone is ringing
// this is equivalent to IN_VOICE_COMM_FOCUS_ID having the focus
- if (!mFocusStack.isEmpty() && mFocusStack.peek().hasSameClient(IN_VOICE_COMM_FOCUS_ID)) {
+ if (!mFocusStack.isEmpty() && isLockedFocusOwner(mFocusStack.peek())) {
return false;
}
return true;
}
+ private boolean isLockedFocusOwner(FocusRequester fr) {
+ return (fr.hasSameClient(IN_VOICE_COMM_FOCUS_ID) || fr.isLockedFocusOwner());
+ }
+
+ /**
+ * Helper function
+ * Pre-conditions: focus stack is not empty, there is one or more locked focus owner
+ * at the top of the focus stack
+ * Push the focus requester onto the audio focus stack at the first position immediately
+ * following the locked focus owners.
+ * @return {@link AudioManager#AUDIOFOCUS_REQUEST_GRANTED} or
+ * {@link AudioManager#AUDIOFOCUS_REQUEST_DELAYED}
+ */
+ private int pushBelowLockedFocusOwners(FocusRequester nfr) {
+ int lastLockedFocusOwnerIndex = mFocusStack.size();
+ for (int index = mFocusStack.size()-1; index >= 0; index--) {
+ if (isLockedFocusOwner(mFocusStack.elementAt(index))) {
+ lastLockedFocusOwnerIndex = index;
+ }
+ }
+ if (lastLockedFocusOwnerIndex == mFocusStack.size()) {
+ // this should not happen, but handle it and log an error
+ Log.e(TAG, "No locked focus owner found in pushBelowLockedFocusOwners()",
+ new Exception());
+ // no locked owner, push at top of stack, focus is granted, propagate change
+ propagateFocusLossFromGain_syncAf(nfr.getGainRequest());
+ mFocusStack.push(nfr);
+ return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
+ } else {
+ mFocusStack.insertElementAt(nfr, lastLockedFocusOwnerIndex);
+ return AudioManager.AUDIOFOCUS_REQUEST_DELAYED;
+ }
+ }
+
/**
* Inner class to monitor audio focus client deaths, and remove them from the audio focus
* stack if necessary.
@@ -571,6 +621,86 @@ public class MediaFocusControl implements OnFinished {
}
}
+ /**
+ * Indicates whether to notify an audio focus owner when it loses focus
+ * with {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK} if it will only duck.
+ * This variable being false indicates an AudioPolicy has been registered and has signaled
+ * it will handle audio ducking.
+ */
+ private boolean mNotifyFocusOwnerOnDuck = true;
+
+ protected void setDuckingInExtPolicyAvailable(boolean available) {
+ mNotifyFocusOwnerOnDuck = !available;
+ }
+
+ boolean mustNotifyFocusOwnerOnDuck() { return mNotifyFocusOwnerOnDuck; }
+
+ private ArrayList<IAudioPolicyCallback> mFocusFollowers = new ArrayList<IAudioPolicyCallback>();
+
+ void addFocusFollower(IAudioPolicyCallback ff) {
+ if (ff == null) {
+ return;
+ }
+ synchronized(mAudioFocusLock) {
+ boolean found = false;
+ for (IAudioPolicyCallback pcb : mFocusFollowers) {
+ if (pcb.asBinder().equals(ff.asBinder())) {
+ found = true;
+ break;
+ }
+ }
+ if (found) {
+ return;
+ } else {
+ mFocusFollowers.add(ff);
+ }
+ }
+ }
+
+ void removeFocusFollower(IAudioPolicyCallback ff) {
+ if (ff == null) {
+ return;
+ }
+ synchronized(mAudioFocusLock) {
+ for (IAudioPolicyCallback pcb : mFocusFollowers) {
+ if (pcb.asBinder().equals(ff.asBinder())) {
+ mFocusFollowers.remove(pcb);
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Called synchronized on mAudioFocusLock
+ */
+ void notifyExtPolicyFocusGrant_syncAf(AudioFocusInfo afi, int requestResult) {
+ for (IAudioPolicyCallback pcb : mFocusFollowers) {
+ try {
+ // oneway
+ pcb.notifyAudioFocusGrant(afi, requestResult);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Can't call notifyAudioFocusGrant() on IAudioPolicyCallback "
+ + pcb.asBinder(), e);
+ }
+ }
+ }
+
+ /**
+ * Called synchronized on mAudioFocusLock
+ */
+ void notifyExtPolicyFocusLoss_syncAf(AudioFocusInfo afi, boolean wasDispatched) {
+ for (IAudioPolicyCallback pcb : mFocusFollowers) {
+ try {
+ // oneway
+ pcb.notifyAudioFocusLoss(afi, wasDispatched);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Can't call notifyAudioFocusLoss() on IAudioPolicyCallback "
+ + pcb.asBinder(), e);
+ }
+ }
+ }
+
protected int getCurrentAudioFocus() {
synchronized(mAudioFocusLock) {
if (mFocusStack.empty()) {
@@ -581,10 +711,11 @@ public class MediaFocusControl implements OnFinished {
}
}
- /** @see AudioManager#requestAudioFocus(AudioManager.OnAudioFocusChangeListener, int, int) */
- protected int requestAudioFocus(int mainStreamType, int focusChangeHint, IBinder cb,
- IAudioFocusDispatcher fd, String clientId, String callingPackageName) {
- Log.i(TAG, " AudioFocus requestAudioFocus() from " + clientId);
+ /** @see AudioManager#requestAudioFocus(AudioManager.OnAudioFocusChangeListener, int, int, int) */
+ protected int requestAudioFocus(AudioAttributes aa, int focusChangeHint, IBinder cb,
+ IAudioFocusDispatcher fd, String clientId, String callingPackageName, int flags) {
+ Log.i(TAG, " AudioFocus requestAudioFocus() from " + clientId + " req=" + focusChangeHint +
+ " flags=0x" + Integer.toHexString(flags));
// we need a valid binder callback for clients
if (!cb.pingBinder()) {
Log.e(TAG, " AudioFocus DOA client for requestAudioFocus(), aborting.");
@@ -597,8 +728,16 @@ public class MediaFocusControl implements OnFinished {
}
synchronized(mAudioFocusLock) {
+ boolean focusGrantDelayed = false;
if (!canReassignAudioFocus()) {
- return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
+ if ((flags & AudioManager.AUDIOFOCUS_FLAG_DELAY_OK) == 0) {
+ return AudioManager.AUDIOFOCUS_REQUEST_FAILED;
+ } else {
+ // request has AUDIOFOCUS_FLAG_DELAY_OK: focus can't be
+ // granted right now, so the requester will be inserted in the focus stack
+ // to receive focus later
+ focusGrantDelayed = true;
+ }
}
// handle the potential premature death of the new holder of the focus
@@ -616,42 +755,64 @@ public class MediaFocusControl implements OnFinished {
if (!mFocusStack.empty() && mFocusStack.peek().hasSameClient(clientId)) {
// if focus is already owned by this client and the reason for acquiring the focus
// hasn't changed, don't do anything
- if (mFocusStack.peek().getGainRequest() == focusChangeHint) {
+ final FocusRequester fr = mFocusStack.peek();
+ if (fr.getGainRequest() == focusChangeHint && fr.getGrantFlags() == flags) {
// unlink death handler so it can be gc'ed.
// linkToDeath() creates a JNI global reference preventing collection.
cb.unlinkToDeath(afdh, 0);
+ notifyExtPolicyFocusGrant_syncAf(fr.toAudioFocusInfo(),
+ AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
}
// the reason for the audio focus request has changed: remove the current top of
// stack and respond as if we had a new focus owner
- FocusRequester fr = mFocusStack.pop();
- fr.release();
+ if (!focusGrantDelayed) {
+ mFocusStack.pop();
+ // the entry that was "popped" is the same that was "peeked" above
+ fr.release();
+ }
}
// focus requester might already be somewhere below in the stack, remove it
- removeFocusStackEntry(clientId, false /* signal */);
+ removeFocusStackEntry(clientId, false /* signal */, false /*notifyFocusFollowers*/);
+
+ final FocusRequester nfr = new FocusRequester(aa, focusChangeHint, flags, fd, cb,
+ clientId, afdh, callingPackageName, Binder.getCallingUid(), this);
+ if (focusGrantDelayed) {
+ // focusGrantDelayed being true implies we can't reassign focus right now
+ // which implies the focus stack is not empty.
+ final int requestResult = pushBelowLockedFocusOwners(nfr);
+ if (requestResult != AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
+ notifyExtPolicyFocusGrant_syncAf(nfr.toAudioFocusInfo(), requestResult);
+ }
+ return requestResult;
+ } else {
+ // propagate the focus change through the stack
+ if (!mFocusStack.empty()) {
+ propagateFocusLossFromGain_syncAf(focusChangeHint);
+ }
- // propagate the focus change through the stack
- if (!mFocusStack.empty()) {
- propagateFocusLossFromGain_syncAf(focusChangeHint);
+ // push focus requester at the top of the audio focus stack
+ mFocusStack.push(nfr);
}
-
- // push focus requester at the top of the audio focus stack
- mFocusStack.push(new FocusRequester(mainStreamType, focusChangeHint, fd, cb,
- clientId, afdh, callingPackageName, Binder.getCallingUid()));
+ notifyExtPolicyFocusGrant_syncAf(nfr.toAudioFocusInfo(),
+ AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
}//synchronized(mAudioFocusLock)
return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
}
- /** @see AudioManager#abandonAudioFocus(AudioManager.OnAudioFocusChangeListener) */
- protected int abandonAudioFocus(IAudioFocusDispatcher fl, String clientId) {
+ /**
+ * @see AudioManager#abandonAudioFocus(AudioManager.OnAudioFocusChangeListener, AudioAttributes)
+ */
+ protected int abandonAudioFocus(IAudioFocusDispatcher fl, String clientId, AudioAttributes aa) {
+ // AudioAttributes are currently ignored, to be used for zones
Log.i(TAG, " AudioFocus abandonAudioFocus() from " + clientId);
try {
// this will take care of notifying the new focus owner if needed
synchronized(mAudioFocusLock) {
- removeFocusStackEntry(clientId, true /*signal*/);
+ removeFocusStackEntry(clientId, true /*signal*/, true /*notifyFocusFollowers*/);
}
} catch (java.util.ConcurrentModificationException cme) {
// Catching this exception here is temporary. It is here just to prevent
@@ -667,7 +828,7 @@ public class MediaFocusControl implements OnFinished {
protected void unregisterAudioFocusClient(String clientId) {
synchronized(mAudioFocusLock) {
- removeFocusStackEntry(clientId, false);
+ removeFocusStackEntry(clientId, false, true /*notifyFocusFollowers*/);
}
}
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index 2036533..4356a3e 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -106,6 +106,10 @@ public final class MediaFormat {
public static final String MIMETYPE_AUDIO_FLAC = "audio/flac";
public static final String MIMETYPE_AUDIO_MSGSM = "audio/gsm";
public static final String MIMETYPE_AUDIO_AC3 = "audio/ac3";
+ /**
+ * @hide
+ */
+ public static final String MIMETYPE_AUDIO_EAC3 = "audio/eac3";
/**
* MIME type for WebVTT subtitle data.
@@ -398,7 +402,7 @@ public final class MediaFormat {
* The associated value is an integer. These values are device and codec specific,
* but lower values generally result in faster and/or less power-hungry encoding.
*
- * @see MediaCodecInfo.CodecCapabilities.EncoderCapabilities#getComplexityRange
+ * @see MediaCodecInfo.EncoderCapabilities#getComplexityRange()
*/
public static final String KEY_COMPLEXITY = "complexity";
@@ -411,7 +415,7 @@ public final class MediaFormat {
*
* @hide
*
- * @see MediaCodecInfo.CodecCapabilities.EncoderCapabilities#getQualityRange
+ * @see MediaCodecInfo.EncoderCapabilities#getQualityRange()
*/
public static final String KEY_QUALITY = "quality";
@@ -428,7 +432,7 @@ public final class MediaFormat {
* A key describing the desired bitrate mode to be used by an encoder.
* Constants are declared in {@link MediaCodecInfo.CodecCapabilities}.
*
- * @see MediaCodecInfo.CodecCapabilities.EncoderCapabilities#isBitrateModeSupported
+ * @see MediaCodecInfo.EncoderCapabilities#isBitrateModeSupported(int)
*/
public static final String KEY_BITRATE_MODE = "bitrate-mode";
diff --git a/media/java/android/media/MediaHTTPConnection.java b/media/java/android/media/MediaHTTPConnection.java
index d0f3334..b2886bb 100644
--- a/media/java/android/media/MediaHTTPConnection.java
+++ b/media/java/android/media/MediaHTTPConnection.java
@@ -31,6 +31,7 @@ import java.net.URL;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.NoRouteToHostException;
+import java.net.ProtocolException;
import java.util.HashMap;
import java.util.Map;
@@ -282,7 +283,7 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
if (offset > 0 && response != HttpURLConnection.HTTP_PARTIAL) {
// Some servers simply ignore "Range" requests and serve
// data from the start of the content.
- throw new IOException();
+ throw new ProtocolException();
}
mInputStream =
@@ -330,6 +331,9 @@ public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
}
return n;
+ } catch (ProtocolException e) {
+ Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
+ return MEDIA_ERROR_UNSUPPORTED;
} catch (NoRouteToHostException e) {
Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
return MEDIA_ERROR_UNSUPPORTED;
diff --git a/media/java/android/media/MediaMetadata.java b/media/java/android/media/MediaMetadata.java
index b4e6033..754da0e 100644
--- a/media/java/android/media/MediaMetadata.java
+++ b/media/java/android/media/MediaMetadata.java
@@ -16,8 +16,6 @@
package android.media;
import android.annotation.NonNull;
-import android.annotation.Nullable;
-import android.content.ContentProviderClient;
import android.content.ContentResolver;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
@@ -25,16 +23,14 @@ import android.media.browse.MediaBrowser;
import android.media.session.MediaController;
import android.net.Uri;
import android.os.Bundle;
-import android.os.CancellationSignal;
-import android.os.OperationCanceledException;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.ArrayMap;
import android.util.Log;
-import android.util.Size;
import android.util.SparseArray;
+import java.util.ArrayList;
import java.util.Set;
/**
@@ -122,6 +118,10 @@ public final class MediaMetadata implements Parcelable {
/**
* The artwork for the media as a {@link Bitmap}.
+ * <p>
+ * The artwork should be relatively small and may be scaled down by the
+ * system if it is too large. For higher resolution artwork
+ * {@link #METADATA_KEY_ART_URI} should be used instead.
*/
public static final String METADATA_KEY_ART = "android.media.metadata.ART";
@@ -129,12 +129,20 @@ public final class MediaMetadata implements Parcelable {
* The artwork for the media as a Uri formatted String. The artwork can be
* loaded using a combination of {@link ContentResolver#openInputStream} and
* {@link BitmapFactory#decodeStream}.
+ * <p>
+ * For the best results, Uris should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork through
+ * {@link ContentResolver#openTypedAssetFileDescriptor(Uri, String, Bundle)}.
*/
public static final String METADATA_KEY_ART_URI = "android.media.metadata.ART_URI";
/**
* The artwork for the album of the media's original source as a
* {@link Bitmap}.
+ * <p>
+ * The artwork should be relatively small and may be scaled down by the
+ * system if it is too large. For higher resolution artwork
+ * {@link #METADATA_KEY_ALBUM_ART_URI} should be used instead.
*/
public static final String METADATA_KEY_ALBUM_ART = "android.media.metadata.ALBUM_ART";
@@ -143,6 +151,10 @@ public final class MediaMetadata implements Parcelable {
* formatted String. The artwork can be loaded using a combination of
* {@link ContentResolver#openInputStream} and
* {@link BitmapFactory#decodeStream}.
+ * <p>
+ * For the best results, Uris should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork through
+ * {@link ContentResolver#openTypedAssetFileDescriptor(Uri, String, Bundle)}.
*/
public static final String METADATA_KEY_ALBUM_ART_URI = "android.media.metadata.ALBUM_ART_URI";
@@ -188,6 +200,10 @@ public final class MediaMetadata implements Parcelable {
* An icon or thumbnail that is suitable for display to the user. When
* displaying an icon for media described by this metadata this should be
* preferred to other fields if present. This must be a {@link Bitmap}.
+ * <p>
+ * The icon should be relatively small and may be scaled down by the system
+ * if it is too large. For higher resolution artwork
+ * {@link #METADATA_KEY_DISPLAY_ICON_URI} should be used instead.
*/
public static final String METADATA_KEY_DISPLAY_ICON
= "android.media.metadata.DISPLAY_ICON";
@@ -199,6 +215,10 @@ public final class MediaMetadata implements Parcelable {
* fields when present. The icon can be loaded using a combination of
* {@link ContentResolver#openInputStream} and
* {@link BitmapFactory#decodeStream}.
+ * <p>
+ * For the best results, Uris should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork through
+ * {@link ContentResolver#openTypedAssetFileDescriptor(Uri, String, Bundle)}.
*/
public static final String METADATA_KEY_DISPLAY_ICON_URI
= "android.media.metadata.DISPLAY_ICON_URI";
@@ -545,6 +565,29 @@ public final class MediaMetadata implements Parcelable {
}
/**
+ * Create a Builder using a {@link MediaMetadata} instance to set
+ * initial values, but replace bitmaps with a scaled down copy if they
+ * are larger than maxBitmapSize.
+ *
+ * @param source The original metadata to copy.
+ * @param maxBitmapSize The maximum height/width for bitmaps contained
+ * in the metadata.
+ * @hide
+ */
+ public Builder(MediaMetadata source, int maxBitmapSize) {
+ this(source);
+ for (String key : mBundle.keySet()) {
+ Object value = mBundle.get(key);
+ if (value != null && value instanceof Bitmap) {
+ Bitmap bmp = (Bitmap) value;
+ if (bmp.getHeight() > maxBitmapSize || bmp.getWidth() > maxBitmapSize) {
+ putBitmap(key, scaleBitmap(bmp, maxBitmapSize));
+ }
+ }
+ }
+ }
+
+ /**
* Put a CharSequence value into the metadata. Custom keys may be used,
* but if the METADATA_KEYs defined in this class are used they may only
* be one of the following:
@@ -602,6 +645,11 @@ public final class MediaMetadata implements Parcelable {
* <li>{@link #METADATA_KEY_DISPLAY_DESCRIPTION}</li>
* <li>{@link #METADATA_KEY_DISPLAY_ICON_URI}</li>
* </ul>
+ * <p>
+ * Uris for artwork should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork
+ * through {@link ContentResolver#openTypedAssetFileDescriptor(Uri,
+ * String, Bundle)}.
*
* @param key The key for referencing this value
* @param value The String value to store
@@ -678,6 +726,10 @@ public final class MediaMetadata implements Parcelable {
* <li>{@link #METADATA_KEY_ALBUM_ART}</li>
* <li>{@link #METADATA_KEY_DISPLAY_ICON}</li>
* </ul>
+ * <p>
+ * Large bitmaps may be scaled down by the system. To pass full
+ * resolution images {@link Uri Uris} should be used with
+ * {@link #putString}.
*
* @param key The key for referencing this value
* @param value The Bitmap to store
@@ -702,5 +754,15 @@ public final class MediaMetadata implements Parcelable {
public MediaMetadata build() {
return new MediaMetadata(mBundle);
}
+
+ private Bitmap scaleBitmap(Bitmap bmp, int maxSize) {
+ float maxSizeF = maxSize;
+ float widthScale = maxSizeF / bmp.getWidth();
+ float heightScale = maxSizeF / bmp.getHeight();
+ float scale = Math.min(widthScale, heightScale);
+ int height = (int) (bmp.getHeight() * scale);
+ int width = (int) (bmp.getWidth() * scale);
+ return Bitmap.createScaledBitmap(bmp, width, height, true);
+ }
}
}
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index afa0b6e..615dac2 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -37,6 +37,7 @@ import android.os.Process;
import android.os.PowerManager;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.provider.Settings;
import android.system.ErrnoException;
import android.system.OsConstants;
import android.util.Log;
@@ -60,6 +61,7 @@ import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -968,11 +970,19 @@ public class MediaPlayer implements SubtitleController.Listener
* @throws IllegalStateException if it is called in an invalid state
*/
public void setDataSource(Context context, Uri uri, Map<String, String> headers)
- throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
- String scheme = uri.getScheme();
- if(scheme == null || scheme.equals("file")) {
+ throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
+ final String scheme = uri.getScheme();
+ if (ContentResolver.SCHEME_FILE.equals(scheme)) {
setDataSource(uri.getPath());
return;
+ } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)
+ && Settings.AUTHORITY.equals(uri.getAuthority())) {
+ // Redirect ringtones to go directly to underlying provider
+ uri = RingtoneManager.getActualDefaultRingtoneUri(context,
+ RingtoneManager.getDefaultType(uri));
+ if (uri == null) {
+ throw new FileNotFoundException("Failed to resolve default ringtone");
+ }
}
AssetFileDescriptor fd = null;
@@ -1634,7 +1644,7 @@ public class MediaPlayer implements SubtitleController.Listener
* Attaches an auxiliary effect to the player. A typical auxiliary effect is a reverberation
* effect which can be applied on any sound source that directs a certain amount of its
* energy to this effect. This amount is defined by setAuxEffectSendLevel().
- * {@see #setAuxEffectSendLevel(float)}.
+ * See {@link #setAuxEffectSendLevel(float)}.
* <p>After creating an auxiliary effect (e.g.
* {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
* {@link android.media.audiofx.AudioEffect#getId()} and use it when calling this method
@@ -1648,8 +1658,8 @@ public class MediaPlayer implements SubtitleController.Listener
/**
- * Sets the send level of the player to the attached auxiliary effect
- * {@see #attachAuxEffect(int)}. The level value range is 0 to 1.0.
+ * Sets the send level of the player to the attached auxiliary effect.
+ * See {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
* <p>By default the send level is 0, so even if an effect is attached to the player
* this method must be called for the effect to be applied.
* <p>Note that the passed level value is a raw scalar. UI controls should be scaled
@@ -2277,9 +2287,9 @@ public class MediaPlayer implements SubtitleController.Listener
* when {@code trackType} is not one of audio, video, or subtitle.
* @throws IllegalStateException if called after {@link #release()}
*
- * @see {@link #getTrackInfo()}
- * @see {@link #selectTrack(int)}
- * @see {@link #deselectTrack(int)}
+ * @see #getTrackInfo()
+ * @see #selectTrack(int)
+ * @see #deselectTrack(int)
*/
public int getSelectedTrack(int trackType) throws IllegalStateException {
if (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE && mSubtitleController != null) {
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index a77bb96..81d5afe 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -222,6 +222,14 @@ public class MediaRecorder
public static final int REMOTE_SUBMIX = 8;
/**
+ * Audio source for FM, which is used to capture current FM tuner output by FMRadio app.
+ * There are two use cases, one is for record FM stream for later listening, another is
+ * for FM indirect mode(the routing except FM to headset(headphone) device routing).
+ * @hide
+ */
+ public static final int FM_TUNER = 1998;
+
+ /**
* Audio source for preemptible, low-priority software hotword detection
* It presents the same gain and pre processing tuning as {@link #VOICE_RECOGNITION}.
* <p>
diff --git a/media/java/android/media/MediaScannerConnection.java b/media/java/android/media/MediaScannerConnection.java
index 273eb64..d714672 100644
--- a/media/java/android/media/MediaScannerConnection.java
+++ b/media/java/android/media/MediaScannerConnection.java
@@ -228,7 +228,7 @@ public class MediaScannerConnection implements ServiceConnection {
* @param callback Optional callback through which you can receive the
* scanned URI and MIME type; If null, the file will be scanned but
* you will not get a result back.
- * @see scanFile(String, String)
+ * @see #scanFile(String, String)
*/
public static void scanFile(Context context, String[] paths, String[] mimeTypes,
OnScanCompletedListener callback) {
diff --git a/media/java/android/media/Ringtone.java b/media/java/android/media/Ringtone.java
index 7d075ba..8441541 100644
--- a/media/java/android/media/Ringtone.java
+++ b/media/java/android/media/Ringtone.java
@@ -51,6 +51,12 @@ public class Ringtone {
private final Context mContext;
private final AudioManager mAudioManager;
+
+ /**
+ * Flag indicating if we're allowed to fall back to remote playback using
+ * {@link #mRemotePlayer}. Typically this is false when we're the remote
+ * player and there is nobody else to delegate to.
+ */
private final boolean mAllowRemote;
private final IRingtonePlayer mRemotePlayer;
private final Binder mRemoteToken;
@@ -211,12 +217,7 @@ public class Ringtone {
mLocalPlayer.setAudioAttributes(mAudioAttributes);
mLocalPlayer.prepare();
- } catch (SecurityException e) {
- destroyLocalPlayer();
- if (!mAllowRemote) {
- Log.w(TAG, "Remote playback not allowed: " + e);
- }
- } catch (IOException e) {
+ } catch (SecurityException | IOException e) {
destroyLocalPlayer();
if (!mAllowRemote) {
Log.w(TAG, "Remote playback not allowed: " + e);
diff --git a/media/java/android/media/audiofx/AudioEffect.java b/media/java/android/media/audiofx/AudioEffect.java
index 9fa3f50..a8b9686 100644
--- a/media/java/android/media/audiofx/AudioEffect.java
+++ b/media/java/android/media/audiofx/AudioEffect.java
@@ -483,6 +483,10 @@ public class AudioEffect {
*/
public static boolean isEffectTypeAvailable(UUID type) {
AudioEffect.Descriptor[] desc = AudioEffect.queryEffects();
+ if (desc == null) {
+ return false;
+ }
+
for (int i = 0; i < desc.length; i++) {
if (desc[i].type.equals(type)) {
return true;
diff --git a/media/java/android/media/audiopolicy/AudioMix.java b/media/java/android/media/audiopolicy/AudioMix.java
index f7967f1..1806662 100644
--- a/media/java/android/media/audiopolicy/AudioMix.java
+++ b/media/java/android/media/audiopolicy/AudioMix.java
@@ -17,20 +17,25 @@
package android.media.audiopolicy;
import android.annotation.IntDef;
+import android.annotation.SystemApi;
import android.media.AudioFormat;
import android.media.AudioSystem;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
+import java.util.Objects;
/**
- * @hide CANDIDATE FOR PUBLIC API
+ * @hide
*/
+@SystemApi
public class AudioMix {
private AudioMixingRule mRule;
private AudioFormat mFormat;
private int mRouteFlags;
+ private String mRegistrationId;
+ private int mMixType = MIX_TYPE_INVALID;
/**
* All parameters are guaranteed valid through the Builder.
@@ -39,20 +44,40 @@ public class AudioMix {
mRule = rule;
mFormat = format;
mRouteFlags = routeFlags;
+ mRegistrationId = null;
+ mMixType = rule.getTargetMixType();
}
/**
* An audio mix behavior where the output of the mix is sent to the original destination of
* the audio signal, i.e. an output device for an output mix, or a recording for an input mix.
*/
+ @SystemApi
public static final int ROUTE_FLAG_RENDER = 0x1;
/**
* An audio mix behavior where the output of the mix is rerouted back to the framework and
- * is accessible for injection or capture through the {@link Audiotrack} and {@link AudioRecord}
+ * is accessible for injection or capture through the {@link AudioTrack} and {@link AudioRecord}
* APIs.
*/
+ @SystemApi
public static final int ROUTE_FLAG_LOOP_BACK = 0x1 << 1;
+ /**
+ * @hide
+ * Invalid mix type, default value.
+ */
+ public static final int MIX_TYPE_INVALID = -1;
+ /**
+ * @hide
+ * Mix type indicating playback streams are mixed.
+ */
+ public static final int MIX_TYPE_PLAYERS = 0;
+ /**
+ * @hide
+ * Mix type indicating recording streams are mixed.
+ */
+ public static final int MIX_TYPE_RECORDERS = 1;
+
int getRouteFlags() {
return mRouteFlags;
}
@@ -66,6 +91,26 @@ public class AudioMix {
}
/** @hide */
+ public int getMixType() {
+ return mMixType;
+ }
+
+ void setRegistration(String regId) {
+ mRegistrationId = regId;
+ }
+
+ /** @hide */
+ public String getRegistration() {
+ return mRegistrationId;
+ }
+
+ /** @hide */
+ @Override
+ public int hashCode() {
+ return Objects.hash(mRouteFlags, mRule, mMixType, mFormat);
+ }
+
+ /** @hide */
@IntDef(flag = true,
value = { ROUTE_FLAG_RENDER, ROUTE_FLAG_LOOP_BACK } )
@Retention(RetentionPolicy.SOURCE)
@@ -75,6 +120,7 @@ public class AudioMix {
* Builder class for {@link AudioMix} objects
*
*/
+ @SystemApi
public static class Builder {
private AudioMixingRule mRule = null;
private AudioFormat mFormat = null;
@@ -91,6 +137,7 @@ public class AudioMix {
* @param rule a non-null {@link AudioMixingRule} instance.
* @throws IllegalArgumentException
*/
+ @SystemApi
public Builder(AudioMixingRule rule)
throws IllegalArgumentException {
if (rule == null) {
@@ -121,6 +168,7 @@ public class AudioMix {
* @return the same Builder instance.
* @throws IllegalArgumentException
*/
+ @SystemApi
public Builder setFormat(AudioFormat format)
throws IllegalArgumentException {
if (format == null) {
@@ -137,6 +185,7 @@ public class AudioMix {
* @return the same Builder instance.
* @throws IllegalArgumentException
*/
+ @SystemApi
public Builder setRouteFlags(@RouteFlags int routeFlags)
throws IllegalArgumentException {
if (routeFlags == 0) {
@@ -155,6 +204,7 @@ public class AudioMix {
* @return a new {@link AudioMix} object
* @throws IllegalArgumentException if no {@link AudioMixingRule} has been set.
*/
+ @SystemApi
public AudioMix build() throws IllegalArgumentException {
if (mRule == null) {
throw new IllegalArgumentException("Illegal null AudioMixingRule");
diff --git a/media/java/android/media/audiopolicy/AudioMixingRule.java b/media/java/android/media/audiopolicy/AudioMixingRule.java
index ced7881..02b03d2 100644
--- a/media/java/android/media/audiopolicy/AudioMixingRule.java
+++ b/media/java/android/media/audiopolicy/AudioMixingRule.java
@@ -16,14 +16,17 @@
package android.media.audiopolicy;
+import android.annotation.SystemApi;
import android.media.AudioAttributes;
+import android.os.Parcel;
import java.util.ArrayList;
import java.util.Iterator;
+import java.util.Objects;
/**
- * @hide CANDIDATE FOR PUBLIC API
+ * @hide
*
* Here's an example of creating a mixing rule for all media playback:
* <pre>
@@ -35,44 +38,114 @@ import java.util.Iterator;
* .build();
* </pre>
*/
+@SystemApi
public class AudioMixingRule {
- private AudioMixingRule(ArrayList<AttributeMatchCriterion> criteria) {
+ private AudioMixingRule(int mixType, ArrayList<AttributeMatchCriterion> criteria) {
mCriteria = criteria;
+ mTargetMixType = mixType;
}
/**
- * A rule requiring the usage information of the {@link AudioAttributes} to match
+ * A rule requiring the usage information of the {@link AudioAttributes} to match.
*/
+ @SystemApi
public static final int RULE_MATCH_ATTRIBUTE_USAGE = 0x1;
/**
- * A rule requiring the usage information of the {@link AudioAttributes} to differ
+ * A rule requiring the capture preset information of the {@link AudioAttributes} to match.
*/
- public static final int RULE_EXCLUDE_ATTRIBUTE_USAGE = 0x1 << 1;
+ @SystemApi
+ public static final int RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET = 0x1 << 1;
+
+ private final static int RULE_EXCLUSION_MASK = 0x8000;
+ /**
+ * @hide
+ * A rule requiring the usage information of the {@link AudioAttributes} to differ.
+ */
+ public static final int RULE_EXCLUDE_ATTRIBUTE_USAGE =
+ RULE_EXCLUSION_MASK | RULE_MATCH_ATTRIBUTE_USAGE;
+ /**
+ * @hide
+ * A rule requiring the capture preset information of the {@link AudioAttributes} to differ.
+ */
+ public static final int RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET =
+ RULE_EXCLUSION_MASK | RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET;
static final class AttributeMatchCriterion {
AudioAttributes mAttr;
int mRule;
+ /** input parameters must be valid */
AttributeMatchCriterion(AudioAttributes attributes, int rule) {
mAttr = attributes;
mRule = rule;
}
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mAttr, mRule);
+ }
+
+ void writeToParcel(Parcel dest) {
+ dest.writeInt(mRule);
+ if ((mRule == RULE_MATCH_ATTRIBUTE_USAGE) || (mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE)) {
+ dest.writeInt(mAttr.getUsage());
+ } else {
+ // capture preset rule
+ dest.writeInt(mAttr.getCapturePreset());
+ }
+ }
}
- private ArrayList<AttributeMatchCriterion> mCriteria;
+ private final int mTargetMixType;
+ int getTargetMixType() { return mTargetMixType; }
+ private final ArrayList<AttributeMatchCriterion> mCriteria;
ArrayList<AttributeMatchCriterion> getCriteria() { return mCriteria; }
+ @Override
+ public int hashCode() {
+ return Objects.hash(mTargetMixType, mCriteria);
+ }
+
+ private static boolean isValidSystemApiRule(int rule) {
+ switch(rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private static boolean isValidIntRule(int rule) {
+ switch(rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_EXCLUDE_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ case RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private static boolean isPlayerRule(int rule) {
+ return ((rule == RULE_MATCH_ATTRIBUTE_USAGE)
+ || (rule == RULE_EXCLUDE_ATTRIBUTE_USAGE));
+ }
+
/**
* Builder class for {@link AudioMixingRule} objects
- *
*/
+ @SystemApi
public static class Builder {
private ArrayList<AttributeMatchCriterion> mCriteria;
+ private int mTargetMixType = AudioMix.MIX_TYPE_INVALID;
/**
* Constructs a new Builder with no rules.
*/
+ @SystemApi
public Builder() {
mCriteria = new ArrayList<AttributeMatchCriterion>();
}
@@ -81,18 +154,80 @@ public class AudioMixingRule {
* Add a rule for the selection of which streams are mixed together.
* @param attrToMatch a non-null AudioAttributes instance for which a contradictory
* rule hasn't been set yet.
- * @param rule one of {@link AudioMixingRule#RULE_EXCLUDE_ATTRIBUTE_USAGE},
- * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE}.
+ * @param rule {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE} or
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}.
* @return the same Builder instance.
* @throws IllegalArgumentException
*/
+ @SystemApi
public Builder addRule(AudioAttributes attrToMatch, int rule)
throws IllegalArgumentException {
+ if (!isValidSystemApiRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ return addRuleInt(attrToMatch, rule);
+ }
+
+ /**
+ * Add a rule by exclusion for the selection of which streams are mixed together.
+ * <br>For instance the following code
+ * <br><pre>
+ * AudioAttributes mediaAttr = new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_MEDIA)
+ * .build();
+ * AudioMixingRule noMediaRule = new AudioMixingRule.Builder()
+ * .excludeRule(mediaAttr, AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE)
+ * .build();
+ * </pre>
+ * <br>will create a rule which maps to any usage value, except USAGE_MEDIA.
+ * @param attrToMatch a non-null AudioAttributes instance for which a contradictory
+ * rule hasn't been set yet.
+ * @param rule {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE} or
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder excludeRule(AudioAttributes attrToMatch, int rule)
+ throws IllegalArgumentException {
+ if (!isValidSystemApiRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ return addRuleInt(attrToMatch, rule | RULE_EXCLUSION_MASK);
+ }
+
+ /**
+ * Add or exclude a rule for the selection of which streams are mixed together.
+ * @param attrToMatch a non-null AudioAttributes instance for which a contradictory
+ * rule hasn't been set yet.
+ * @param rule one of {@link AudioMixingRule#RULE_EXCLUDE_ATTRIBUTE_USAGE},
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE},
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET} or
+ * {@link AudioMixingRule#RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ Builder addRuleInt(AudioAttributes attrToMatch, int rule)
+ throws IllegalArgumentException {
if (attrToMatch == null) {
throw new IllegalArgumentException("Illegal null AudioAttributes argument");
}
- if ((rule != RULE_MATCH_ATTRIBUTE_USAGE) && (rule != RULE_EXCLUDE_ATTRIBUTE_USAGE)) {
+ if (!isValidIntRule(rule)) {
throw new IllegalArgumentException("Illegal rule value " + rule);
+ } else {
+ // as rules are added to the Builder, we verify they are consistent with the type
+ // of mix being built. When adding the first rule, the mix type is MIX_TYPE_INVALID.
+ if (mTargetMixType == AudioMix.MIX_TYPE_INVALID) {
+ if (isPlayerRule(rule)) {
+ mTargetMixType = AudioMix.MIX_TYPE_PLAYERS;
+ } else {
+ mTargetMixType = AudioMix.MIX_TYPE_RECORDERS;
+ }
+ } else if (((mTargetMixType == AudioMix.MIX_TYPE_PLAYERS) && !isPlayerRule(rule))
+ || ((mTargetMixType == AudioMix.MIX_TYPE_RECORDERS) && isPlayerRule(rule)))
+ {
+ throw new IllegalArgumentException("Incompatible rule for mix");
+ }
}
synchronized (mCriteria) {
Iterator<AttributeMatchCriterion> crIterator = mCriteria.iterator();
@@ -111,6 +246,19 @@ public class AudioMixingRule {
+ attrToMatch);
}
}
+ } else if ((rule == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
+ || (rule == RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET)) {
+                    // "capture preset"-based rule
+ if (criterion.mAttr.getCapturePreset() == attrToMatch.getCapturePreset()) {
+ if (criterion.mRule == rule) {
+ // rule already exists, we're done
+ return this;
+ } else {
+                            // criterion already exists with another rule, it is incompatible
+ throw new IllegalArgumentException("Contradictory rule exists for "
+ + attrToMatch);
+ }
+ }
}
}
// rule didn't exist, add it
@@ -119,13 +267,32 @@ public class AudioMixingRule {
return this;
}
+ Builder addRuleFromParcel(Parcel in) throws IllegalArgumentException {
+ int rule = in.readInt();
+ AudioAttributes attr;
+ if ((rule == RULE_MATCH_ATTRIBUTE_USAGE) || (rule == RULE_EXCLUDE_ATTRIBUTE_USAGE)) {
+ int usage = in.readInt();
+ attr = new AudioAttributes.Builder()
+ .setUsage(usage).build();
+ } else if ((rule == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
+ || (rule == RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET)) {
+ int preset = in.readInt();
+ attr = new AudioAttributes.Builder()
+ .setInternalCapturePreset(preset).build();
+ } else {
+                in.readInt(); // assume there was an int value to read, as for now they come in pairs
+ throw new IllegalArgumentException("Illegal rule value " + rule + " in parcel");
+ }
+ return addRuleInt(attr, rule);
+ }
+
/**
* Combines all of the matching and exclusion rules that have been set and return a new
* {@link AudioMixingRule} object.
* @return a new {@link AudioMixingRule} object
*/
public AudioMixingRule build() {
- return new AudioMixingRule(mCriteria);
+ return new AudioMixingRule(mTargetMixType, mCriteria);
}
}
}
diff --git a/media/java/android/media/audiopolicy/AudioPolicy.java b/media/java/android/media/audiopolicy/AudioPolicy.java
index 314eb88..f128044 100644
--- a/media/java/android/media/audiopolicy/AudioPolicy.java
+++ b/media/java/android/media/audiopolicy/AudioPolicy.java
@@ -17,71 +17,126 @@
package android.media.audiopolicy;
import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioAttributes;
+import android.media.AudioFocusInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.IAudioService;
+import android.media.MediaRecorder;
import android.os.Binder;
+import android.os.Handler;
import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.RemoteException;
+import android.os.ServiceManager;
import android.util.Log;
+import android.util.Slog;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
/**
- * @hide CANDIDATE FOR PUBLIC API
+ * @hide
* AudioPolicy provides access to the management of audio routing and audio focus.
*/
+@SystemApi
public class AudioPolicy {
private static final String TAG = "AudioPolicy";
+ private static final boolean DEBUG = false;
+ private final Object mLock = new Object();
/**
- * The status of an audio policy that cannot be used because it is invalid.
- */
- public static final int POLICY_STATUS_INVALID = 0;
- /**
* The status of an audio policy that is valid but cannot be used because it is not registered.
*/
+ @SystemApi
public static final int POLICY_STATUS_UNREGISTERED = 1;
/**
* The status of an audio policy that is valid, successfully registered and thus active.
*/
+ @SystemApi
public static final int POLICY_STATUS_REGISTERED = 2;
private int mStatus;
- private AudioPolicyStatusListener mStatusListener = null;
+ private String mRegistrationId;
+ private AudioPolicyStatusListener mStatusListener;
- private final IBinder mToken = new Binder();
- /** @hide */
- public IBinder token() { return mToken; }
+ /**
+ * The behavior of a policy with regards to audio focus where it relies on the application
+     * to do the ducking; this is the legacy and default behavior.
+ */
+ @SystemApi
+ public static final int FOCUS_POLICY_DUCKING_IN_APP = 0;
+ public static final int FOCUS_POLICY_DUCKING_DEFAULT = FOCUS_POLICY_DUCKING_IN_APP;
+ /**
+ * The behavior of a policy with regards to audio focus where it handles ducking instead
+ * of the application losing focus and being signaled it can duck (as communicated by
+ * {@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}).
+ * <br>Can only be used after having set a listener with
+ * {@link AudioPolicy#setAudioPolicyFocusListener(AudioPolicyFocusListener)}.
+ */
+ @SystemApi
+ public static final int FOCUS_POLICY_DUCKING_IN_POLICY = 1;
+
+ private AudioPolicyFocusListener mFocusListener;
+
+ private Context mContext;
private AudioPolicyConfig mConfig;
+
/** @hide */
public AudioPolicyConfig getConfig() { return mConfig; }
+ /** @hide */
+ public boolean hasFocusListener() { return mFocusListener != null; }
/**
* The parameter is guaranteed non-null through the Builder
*/
- private AudioPolicy(AudioPolicyConfig config) {
+ private AudioPolicy(AudioPolicyConfig config, Context context, Looper looper,
+ AudioPolicyFocusListener fl, AudioPolicyStatusListener sl) {
mConfig = config;
- if (mConfig.mMixes.isEmpty()) {
- mStatus = POLICY_STATUS_INVALID;
+ mStatus = POLICY_STATUS_UNREGISTERED;
+ mContext = context;
+ if (looper == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper != null) {
+ mEventHandler = new EventHandler(this, looper);
} else {
- mStatus = POLICY_STATUS_UNREGISTERED;
+ mEventHandler = null;
+ Log.e(TAG, "No event handler due to looper without a thread");
}
+ mFocusListener = fl;
+ mStatusListener = sl;
}
/**
* Builder class for {@link AudioPolicy} objects
*/
+ @SystemApi
public static class Builder {
private ArrayList<AudioMix> mMixes;
+ private Context mContext;
+ private Looper mLooper;
+ private AudioPolicyFocusListener mFocusListener;
+ private AudioPolicyStatusListener mStatusListener;
/**
* Constructs a new Builder with no audio mixes.
+ * @param context the context for the policy
*/
- public Builder() {
+ @SystemApi
+ public Builder(Context context) {
mMixes = new ArrayList<AudioMix>();
+ mContext = context;
}
/**
@@ -90,7 +145,8 @@ public class AudioPolicy {
* @return the same Builder instance.
* @throws IllegalArgumentException
*/
- public Builder addMix(AudioMix mix) throws IllegalArgumentException {
+ @SystemApi
+ public Builder addMix(@NonNull AudioMix mix) throws IllegalArgumentException {
if (mix == null) {
throw new IllegalArgumentException("Illegal null AudioMix argument");
}
@@ -98,36 +154,361 @@ public class AudioPolicy {
return this;
}
+ /**
+ * Sets the {@link Looper} on which to run the event loop.
+ * @param looper a non-null specific Looper.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setLooper(@NonNull Looper looper) throws IllegalArgumentException {
+ if (looper == null) {
+ throw new IllegalArgumentException("Illegal null Looper argument");
+ }
+ mLooper = looper;
+ return this;
+ }
+
+ /**
+ * Sets the audio focus listener for the policy.
+ * @param l a {@link AudioPolicy.AudioPolicyFocusListener}
+ */
+ @SystemApi
+ public void setAudioPolicyFocusListener(AudioPolicyFocusListener l) {
+ mFocusListener = l;
+ }
+
+ /**
+ * Sets the audio policy status listener.
+ * @param l a {@link AudioPolicy.AudioPolicyStatusListener}
+ */
+ @SystemApi
+ public void setAudioPolicyStatusListener(AudioPolicyStatusListener l) {
+ mStatusListener = l;
+ }
+
+ @SystemApi
public AudioPolicy build() {
- return new AudioPolicy(new AudioPolicyConfig(mMixes));
+ return new AudioPolicy(new AudioPolicyConfig(mMixes), mContext, mLooper,
+ mFocusListener, mStatusListener);
+ }
+ }
+
+ public void setRegistration(String regId) {
+ synchronized (mLock) {
+ mRegistrationId = regId;
+ mConfig.setRegistration(regId);
+ if (regId != null) {
+ mStatus = POLICY_STATUS_REGISTERED;
+ } else {
+ mStatus = POLICY_STATUS_UNREGISTERED;
+ }
+ }
+ sendMsg(MSG_POLICY_STATUS_CHANGE);
+ }
+
+ private boolean policyReadyToUse() {
+ synchronized (mLock) {
+ if (mStatus != POLICY_STATUS_REGISTERED) {
+ Log.e(TAG, "Cannot use unregistered AudioPolicy");
+ return false;
+ }
+ if (mContext == null) {
+ Log.e(TAG, "Cannot use AudioPolicy without context");
+ return false;
+ }
+ if (mRegistrationId == null) {
+ Log.e(TAG, "Cannot use unregistered AudioPolicy");
+ return false;
+ }
+ }
+ if (!(PackageManager.PERMISSION_GRANTED == mContext.checkCallingOrSelfPermission(
+ android.Manifest.permission.MODIFY_AUDIO_ROUTING))) {
+ Slog.w(TAG, "Cannot use AudioPolicy for pid " + Binder.getCallingPid() + " / uid "
+ + Binder.getCallingUid() + ", needs MODIFY_AUDIO_ROUTING");
+ return false;
+ }
+ return true;
+ }
+
+ private void checkMixReadyToUse(AudioMix mix, boolean forTrack)
+ throws IllegalArgumentException{
+ if (mix == null) {
+ String msg = forTrack ? "Invalid null AudioMix for AudioTrack creation"
+ : "Invalid null AudioMix for AudioRecord creation";
+ throw new IllegalArgumentException(msg);
+ }
+ if (!mConfig.mMixes.contains(mix)) {
+ throw new IllegalArgumentException("Invalid mix: not part of this policy");
+ }
+ if ((mix.getRouteFlags() & AudioMix.ROUTE_FLAG_LOOP_BACK) != AudioMix.ROUTE_FLAG_LOOP_BACK)
+ {
+ throw new IllegalArgumentException("Invalid AudioMix: not defined for loop back");
+ }
+ if (forTrack && (mix.getMixType() != AudioMix.MIX_TYPE_RECORDERS)) {
+ throw new IllegalArgumentException(
+ "Invalid AudioMix: not defined for being a recording source");
+ }
+ if (!forTrack && (mix.getMixType() != AudioMix.MIX_TYPE_PLAYERS)) {
+ throw new IllegalArgumentException(
+ "Invalid AudioMix: not defined for capturing playback");
+ }
+ }
+
+ /**
+ * Returns the current behavior for audio focus-related ducking.
+ * @return {@link #FOCUS_POLICY_DUCKING_IN_APP} or {@link #FOCUS_POLICY_DUCKING_IN_POLICY}
+ */
+ @SystemApi
+ public int getFocusDuckingBehavior() {
+ return mConfig.mDuckingPolicy;
+ }
+
+ // Note on implementation: not part of the Builder as there can be only one registered policy
+ // that handles ducking but there can be multiple policies
+ /**
+ * Sets the behavior for audio focus-related ducking.
+ * There must be a focus listener if this policy is to handle ducking.
+ * @param behavior {@link #FOCUS_POLICY_DUCKING_IN_APP} or
+ * {@link #FOCUS_POLICY_DUCKING_IN_POLICY}
+ * @return {@link AudioManager#SUCCESS} or {@link AudioManager#ERROR} (for instance if there
+ * is already an audio policy that handles ducking).
+ * @throws IllegalArgumentException
+ * @throws IllegalStateException
+ */
+ @SystemApi
+ public int setFocusDuckingBehavior(int behavior)
+ throws IllegalArgumentException, IllegalStateException {
+ if ((behavior != FOCUS_POLICY_DUCKING_IN_APP)
+ && (behavior != FOCUS_POLICY_DUCKING_IN_POLICY)) {
+ throw new IllegalArgumentException("Invalid ducking behavior " + behavior);
+ }
+ synchronized (mLock) {
+ if (mStatus != POLICY_STATUS_REGISTERED) {
+ throw new IllegalStateException(
+ "Cannot change ducking behavior for unregistered policy");
+ }
+ if ((behavior == FOCUS_POLICY_DUCKING_IN_POLICY)
+ && (mFocusListener == null)) {
+ // there must be a focus listener if the policy handles ducking
+ throw new IllegalStateException(
+ "Cannot handle ducking without an audio focus listener");
+ }
+ IAudioService service = getService();
+ try {
+ final int status = service.setFocusPropertiesForPolicy(behavior /*duckingBehavior*/,
+ this.cb());
+ if (status == AudioManager.SUCCESS) {
+ mConfig.mDuckingPolicy = behavior;
+ }
+ return status;
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in setFocusPropertiesForPolicy for behavior", e);
+ return AudioManager.ERROR;
+ }
}
}
+ /**
+ * Create an {@link AudioRecord} instance that is associated with the given {@link AudioMix}.
+ * Audio buffers recorded through the created instance will contain the mix of the audio
+ * streams that fed the given mixer.
+ * @param mix a non-null {@link AudioMix} instance whose routing flags was defined with
+ * {@link AudioMix#ROUTE_FLAG_LOOP_BACK}, previously added to this policy.
+ * @return a new {@link AudioRecord} instance whose data format is the one defined in the
+ * {@link AudioMix}, or null if this policy was not successfully registered
+ * with {@link AudioManager#registerAudioPolicy(AudioPolicy)}.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public AudioRecord createAudioRecordSink(AudioMix mix) throws IllegalArgumentException {
+ if (!policyReadyToUse()) {
+ Log.e(TAG, "Cannot create AudioRecord sink for AudioMix");
+ return null;
+ }
+ checkMixReadyToUse(mix, false/*not for an AudioTrack*/);
+ // create an AudioFormat from the mix format compatible with recording, as the mix
+ // was defined for playback
+ AudioFormat mixFormat = new AudioFormat.Builder(mix.getFormat())
+ .setChannelMask(AudioFormat.inChannelMaskFromOutChannelMask(
+ mix.getFormat().getChannelMask()))
+ .build();
+ // create the AudioRecord, configured for loop back, using the same format as the mix
+ AudioRecord ar = new AudioRecord(
+ new AudioAttributes.Builder()
+ .setInternalCapturePreset(MediaRecorder.AudioSource.REMOTE_SUBMIX)
+ .addTag(addressForTag(mix))
+ .build(),
+ mixFormat,
+ AudioRecord.getMinBufferSize(mix.getFormat().getSampleRate(),
+ // using stereo for buffer size to avoid the current poor support for masks
+ AudioFormat.CHANNEL_IN_STEREO, mix.getFormat().getEncoding()),
+ AudioManager.AUDIO_SESSION_ID_GENERATE
+ );
+ return ar;
+ }
+ /**
+ * Create an {@link AudioTrack} instance that is associated with the given {@link AudioMix}.
+ * Audio buffers played through the created instance will be sent to the given mix
+ * to be recorded through the recording APIs.
+ * @param mix a non-null {@link AudioMix} instance whose routing flags was defined with
+ * {@link AudioMix#ROUTE_FLAG_LOOP_BACK}, previously added to this policy.
+ * @return a new {@link AudioTrack} instance whose data format is the one defined in the
+ * {@link AudioMix}, or null if this policy was not successfully registered
+ * with {@link AudioManager#registerAudioPolicy(AudioPolicy)}.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public AudioTrack createAudioTrackSource(AudioMix mix) throws IllegalArgumentException {
+ if (!policyReadyToUse()) {
+ Log.e(TAG, "Cannot create AudioTrack source for AudioMix");
+ return null;
+ }
+ checkMixReadyToUse(mix, true/*for an AudioTrack*/);
+ // create the AudioTrack, configured for loop back, using the same format as the mix
+ AudioTrack at = new AudioTrack(
+ new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_VIRTUAL_SOURCE)
+ .addTag(addressForTag(mix))
+ .build(),
+ mix.getFormat(),
+ AudioTrack.getMinBufferSize(mix.getFormat().getSampleRate(),
+ mix.getFormat().getChannelMask(), mix.getFormat().getEncoding()),
+ AudioTrack.MODE_STREAM,
+ AudioManager.AUDIO_SESSION_ID_GENERATE
+ );
+ return at;
+ }
+
+ @SystemApi
public int getStatus() {
return mStatus;
}
+ @SystemApi
public static abstract class AudioPolicyStatusListener {
- void onStatusChange() {}
- void onMixStateUpdate(AudioMix mix) {}
+ public void onStatusChange() {}
+ public void onMixStateUpdate(AudioMix mix) {}
+ }
+
+ @SystemApi
+ public static abstract class AudioPolicyFocusListener {
+ public void onAudioFocusGrant(AudioFocusInfo afi, int requestResult) {}
+ public void onAudioFocusLoss(AudioFocusInfo afi, boolean wasNotified) {}
}
- void setStatusListener(AudioPolicyStatusListener l) {
- mStatusListener = l;
+ private void onPolicyStatusChange() {
+ AudioPolicyStatusListener l;
+ synchronized (mLock) {
+ if (mStatusListener == null) {
+ return;
+ }
+ l = mStatusListener;
+ }
+ l.onStatusChange();
}
+ //==================================================
+ // Callback interface
+
/** @hide */
- @Override
- public String toString () {
+ public IAudioPolicyCallback cb() { return mPolicyCb; }
+
+ private final IAudioPolicyCallback mPolicyCb = new IAudioPolicyCallback.Stub() {
+
+ public void notifyAudioFocusGrant(AudioFocusInfo afi, int requestResult) {
+ sendMsg(MSG_FOCUS_GRANT, afi, requestResult);
+ if (DEBUG) {
+ Log.v(TAG, "notifyAudioFocusGrant: pack=" + afi.getPackageName() + " client="
+ + afi.getClientId() + "reqRes=" + requestResult);
+ }
+ }
+
+ public void notifyAudioFocusLoss(AudioFocusInfo afi, boolean wasNotified) {
+ sendMsg(MSG_FOCUS_LOSS, afi, wasNotified ? 1 : 0);
+ if (DEBUG) {
+ Log.v(TAG, "notifyAudioFocusLoss: pack=" + afi.getPackageName() + " client="
+ + afi.getClientId() + "wasNotified=" + wasNotified);
+ }
+ }
+ };
+
+ //==================================================
+ // Event handling
+ private final EventHandler mEventHandler;
+ private final static int MSG_POLICY_STATUS_CHANGE = 0;
+ private final static int MSG_FOCUS_GRANT = 1;
+ private final static int MSG_FOCUS_LOSS = 2;
+
+ private class EventHandler extends Handler {
+ public EventHandler(AudioPolicy ap, Looper looper) {
+ super(looper);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case MSG_POLICY_STATUS_CHANGE:
+ onPolicyStatusChange();
+ break;
+ case MSG_FOCUS_GRANT:
+ if (mFocusListener != null) {
+ mFocusListener.onAudioFocusGrant(
+ (AudioFocusInfo) msg.obj, msg.arg1);
+ }
+ break;
+ case MSG_FOCUS_LOSS:
+ if (mFocusListener != null) {
+ mFocusListener.onAudioFocusLoss(
+ (AudioFocusInfo) msg.obj, msg.arg1 != 0);
+ }
+ break;
+ default:
+ Log.e(TAG, "Unknown event " + msg.what);
+ }
+ }
+ }
+
+ //==========================================================
+ // Utils
+ private static String addressForTag(AudioMix mix) {
+ return "addr=" + mix.getRegistration();
+ }
+
+ private void sendMsg(int msg) {
+ if (mEventHandler != null) {
+ mEventHandler.sendEmptyMessage(msg);
+ }
+ }
+
+ private void sendMsg(int msg, Object obj, int i) {
+ if (mEventHandler != null) {
+ mEventHandler.sendMessage(
+ mEventHandler.obtainMessage(msg, i /*arg1*/, 0 /*arg2, ignored*/, obj));
+ }
+ }
+
+ private static IAudioService sService;
+
+ private static IAudioService getService()
+ {
+ if (sService != null) {
+ return sService;
+ }
+ IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
+ sService = IAudioService.Stub.asInterface(b);
+ return sService;
+ }
+
+ public String toLogFriendlyString() {
String textDump = new String("android.media.audiopolicy.AudioPolicy:\n");
- textDump += "config=" + mConfig.toString();
+ textDump += "config=" + mConfig.toLogFriendlyString();
return (textDump);
}
/** @hide */
@IntDef({
- POLICY_STATUS_INVALID,
POLICY_STATUS_REGISTERED,
POLICY_STATUS_UNREGISTERED
})
diff --git a/media/java/android/media/audiopolicy/AudioPolicyConfig.java b/media/java/android/media/audiopolicy/AudioPolicyConfig.java
index 2fc6d58..019309d 100644
--- a/media/java/android/media/audiopolicy/AudioPolicyConfig.java
+++ b/media/java/android/media/audiopolicy/AudioPolicyConfig.java
@@ -27,6 +27,7 @@ import android.os.Parcelable;
import android.util.Log;
import java.util.ArrayList;
+import java.util.Objects;
/**
* @hide
@@ -36,7 +37,14 @@ public class AudioPolicyConfig implements Parcelable {
private static final String TAG = "AudioPolicyConfig";
- ArrayList<AudioMix> mMixes;
+ protected ArrayList<AudioMix> mMixes;
+ protected int mDuckingPolicy = AudioPolicy.FOCUS_POLICY_DUCKING_IN_APP;
+
+ private String mRegistrationId = null;
+
+ protected AudioPolicyConfig(AudioPolicyConfig conf) {
+ mMixes = conf.mMixes;
+ }
AudioPolicyConfig(ArrayList<AudioMix> mixes) {
mMixes = mixes;
@@ -56,6 +64,11 @@ public class AudioPolicyConfig implements Parcelable {
}
@Override
+ public int hashCode() {
+ return Objects.hash(mMixes);
+ }
+
+ @Override
public int describeContents() {
return 0;
}
@@ -74,8 +87,7 @@ public class AudioPolicyConfig implements Parcelable {
final ArrayList<AttributeMatchCriterion> criteria = mix.getRule().getCriteria();
dest.writeInt(criteria.size());
for (AttributeMatchCriterion criterion : criteria) {
- dest.writeInt(criterion.mRule);
- dest.writeInt(criterion.mAttr.getUsage());
+ criterion.writeToParcel(dest);
}
}
}
@@ -100,24 +112,13 @@ public class AudioPolicyConfig implements Parcelable {
AudioMixingRule.Builder ruleBuilder = new AudioMixingRule.Builder();
for (int j = 0 ; j < nbRules ; j++) {
// read the matching rules
- int matchRule = in.readInt();
- if ((matchRule == AudioMixingRule.RULE_EXCLUDE_ATTRIBUTE_USAGE)
- || (matchRule == AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE)) {
- int usage = in.readInt();
- final AudioAttributes attr = new AudioAttributes.Builder()
- .setUsage(usage).build();
- ruleBuilder.addRule(attr, matchRule);
- } else {
- Log.w(TAG, "Encountered unsupported rule, skipping");
- in.readInt();
- }
+ ruleBuilder.addRuleFromParcel(in);
}
mixBuilder.setMixingRule(ruleBuilder.build());
mMixes.add(mixBuilder.build());
}
}
- /** @hide */
public static final Parcelable.Creator<AudioPolicyConfig> CREATOR
= new Parcelable.Creator<AudioPolicyConfig>() {
/**
@@ -133,11 +134,9 @@ public class AudioPolicyConfig implements Parcelable {
}
};
- /** @hide */
- @Override
- public String toString () {
+ public String toLogFriendlyString () {
String textDump = new String("android.media.audiopolicy.AudioPolicyConfig:\n");
- textDump += mMixes.size() + " AudioMix:\n";
+ textDump += mMixes.size() + " AudioMix: "+ mRegistrationId + "\n";
for(AudioMix mix : mMixes) {
// write mix route flags
textDump += "* route flags=0x" + Integer.toHexString(mix.getRouteFlags()) + "\n";
@@ -158,6 +157,14 @@ public class AudioPolicyConfig implements Parcelable {
textDump += " match usage ";
textDump += criterion.mAttr.usageToString();
break;
+ case AudioMixingRule.RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET:
+ textDump += " exclude capture preset ";
+ textDump += criterion.mAttr.getCapturePreset();
+ break;
+ case AudioMixingRule.RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ textDump += " match capture preset ";
+ textDump += criterion.mAttr.getCapturePreset();
+ break;
default:
textDump += "invalid rule!";
}
@@ -166,4 +173,33 @@ public class AudioPolicyConfig implements Parcelable {
}
return textDump;
}
+
+ protected void setRegistration(String regId) {
+ final boolean currentRegNull = (mRegistrationId == null) || mRegistrationId.isEmpty();
+ final boolean newRegNull = (regId == null) || regId.isEmpty();
+ if (!currentRegNull && !newRegNull && !mRegistrationId.equals(regId)) {
+ Log.e(TAG, "Invalid registration transition from " + mRegistrationId + " to " + regId);
+ return;
+ }
+ mRegistrationId = regId == null ? "" : regId;
+ int mixIndex = 0;
+ for (AudioMix mix : mMixes) {
+ if (!mRegistrationId.isEmpty()) {
+ mix.setRegistration(mRegistrationId + "mix" + mixTypeId(mix.getMixType()) + ":"
+ + mixIndex++);
+ } else {
+ mix.setRegistration("");
+ }
+ }
+ }
+
+ private static String mixTypeId(int type) {
+ if (type == AudioMix.MIX_TYPE_PLAYERS) return "p";
+ else if (type == AudioMix.MIX_TYPE_RECORDERS) return "r";
+ else return "i";
+ }
+
+ protected String getRegistration() {
+ return mRegistrationId;
+ }
}
diff --git a/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl b/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl
new file mode 100644
index 0000000..c777c58
--- /dev/null
+++ b/media/java/android/media/audiopolicy/IAudioPolicyCallback.aidl
@@ -0,0 +1,28 @@
+/* Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiopolicy;
+
+import android.media.AudioFocusInfo;
+
+/**
+ * @hide
+ */
+oneway interface IAudioPolicyCallback {
+
+ // callbacks for audio focus
+ void notifyAudioFocusGrant(in AudioFocusInfo afi, int requestResult);
+ void notifyAudioFocusLoss(in AudioFocusInfo afi, boolean wasNotified);
+}
diff --git a/media/java/android/media/projection/MediaProjection.java b/media/java/android/media/projection/MediaProjection.java
index e6dadf9..a6bde1d 100644
--- a/media/java/android/media/projection/MediaProjection.java
+++ b/media/java/android/media/projection/MediaProjection.java
@@ -76,6 +76,9 @@ public final class MediaProjection {
if (callback == null) {
throw new IllegalArgumentException("callback should not be null");
}
+ if (handler == null) {
+ handler = new Handler();
+ }
mCallbacks.put(callback, new CallbackRecord(callback, handler));
}
@@ -126,8 +129,7 @@ public final class MediaProjection {
* invoked, or null if the callback should be invoked on the calling
* thread's main {@link android.os.Looper}.
*
- * @see android.hardware.display.DisplayManager#createVirtualDisplay(
- * String, int, int, int, int, Surface, VirtualDisplay.Callback, Handler)
+ * @see android.hardware.display.VirtualDisplay
*/
public VirtualDisplay createVirtualDisplay(@NonNull String name,
int width, int height, int dpi, int flags, @Nullable Surface surface,
@@ -183,16 +185,15 @@ public final class MediaProjection {
private final class MediaProjectionCallback extends IMediaProjectionCallback.Stub {
@Override
public void onStop() {
- final int N = mCallbacks.size();
- for (int i = 0; i < N; i++) {
- mCallbacks.get(i).onStop();
+ for (CallbackRecord cbr : mCallbacks.values()) {
+ cbr.onStop();
}
}
}
private final static class CallbackRecord {
- private Callback mCallback;
- private Handler mHandler;
+ private final Callback mCallback;
+ private final Handler mHandler;
public CallbackRecord(Callback callback, Handler handler) {
mCallback = callback;
diff --git a/media/java/android/media/session/MediaController.java b/media/java/android/media/session/MediaController.java
index e490c2b..dd6bd20 100644
--- a/media/java/android/media/session/MediaController.java
+++ b/media/java/android/media/session/MediaController.java
@@ -449,6 +449,7 @@ public final class MediaController {
}
MessageHandler holder = new MessageHandler(handler.getLooper(), cb);
mCallbacks.add(holder);
+ holder.mRegistered = true;
if (!mCbRegistered) {
try {
@@ -467,6 +468,7 @@ public final class MediaController {
if (cb == handler.mCallback) {
mCallbacks.remove(i);
success = true;
+ handler.mRegistered = false;
}
}
if (mCbRegistered && mCallbacks.size() == 0) {
@@ -956,6 +958,7 @@ public final class MediaController {
private final static class MessageHandler extends Handler {
private final MediaController.Callback mCallback;
+ private boolean mRegistered = false;
public MessageHandler(Looper looper, MediaController.Callback cb) {
super(looper, null, true);
@@ -964,6 +967,9 @@ public final class MediaController {
@Override
public void handleMessage(Message msg) {
+ if (!mRegistered) {
+ return;
+ }
switch (msg.what) {
case MSG_EVENT:
mCallback.onSessionEvent((String) msg.obj, msg.getData());
diff --git a/media/java/android/media/session/MediaSession.java b/media/java/android/media/session/MediaSession.java
index 86da80a..df4bc78 100644
--- a/media/java/android/media/session/MediaSession.java
+++ b/media/java/android/media/session/MediaSession.java
@@ -104,6 +104,7 @@ public final class MediaSession {
public @interface SessionFlags { }
private final Object mLock = new Object();
+ private final int mMaxBitmapSize;
private final MediaSession.Token mSessionToken;
private final MediaController mController;
@@ -147,6 +148,8 @@ public final class MediaSession {
if (TextUtils.isEmpty(tag)) {
throw new IllegalArgumentException("tag cannot be null or empty");
}
+ mMaxBitmapSize = context.getResources().getDimensionPixelSize(
+ com.android.internal.R.dimen.config_mediaMetadataBitmapMaxSize);
mCbStub = new CallbackStub(this);
MediaSessionManager manager = (MediaSessionManager) context
.getSystemService(Context.MEDIA_SESSION_SERVICE);
@@ -286,7 +289,9 @@ public final class MediaSession {
if (volumeProvider == null) {
throw new IllegalArgumentException("volumeProvider may not be null!");
}
- mVolumeProvider = volumeProvider;
+ synchronized (mLock) {
+ mVolumeProvider = volumeProvider;
+ }
volumeProvider.setCallback(new VolumeProvider.Callback() {
@Override
public void onVolumeChanged(VolumeProvider volumeProvider) {
@@ -407,6 +412,9 @@ public final class MediaSession {
* @param metadata The new metadata
*/
public void setMetadata(@Nullable MediaMetadata metadata) {
+ if (metadata != null ) {
+ metadata = (new MediaMetadata.Builder(metadata, mMaxBitmapSize)).build();
+ }
try {
mBinder.setMetadata(metadata);
} catch (RemoteException e) {
@@ -449,6 +457,27 @@ public final class MediaSession {
}
/**
+ * Set the style of rating used by this session. Apps trying to set the
+ * rating should use this style. Must be one of the following:
+ * <ul>
+ * <li>{@link Rating#RATING_NONE}</li>
+ * <li>{@link Rating#RATING_3_STARS}</li>
+ * <li>{@link Rating#RATING_4_STARS}</li>
+ * <li>{@link Rating#RATING_5_STARS}</li>
+ * <li>{@link Rating#RATING_HEART}</li>
+ * <li>{@link Rating#RATING_PERCENTAGE}</li>
+ * <li>{@link Rating#RATING_THUMB_UP_DOWN}</li>
+ * </ul>
+ */
+ public void setRatingType(int type) {
+ try {
+ mBinder.setRatingType(type);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Error in setRatingType.", e);
+ }
+ }
+
+ /**
* Set some extras that can be associated with the {@link MediaSession}. No assumptions should
* be made as to how a {@link MediaController} will handle these extras.
* Keys should be fully qualified (e.g. com.example.MY_EXTRA) to avoid conflicts.
@@ -470,9 +499,11 @@ public final class MediaSession {
* @hide
*/
public void notifyRemoteVolumeChanged(VolumeProvider provider) {
- if (provider == null || provider != mVolumeProvider) {
- Log.w(TAG, "Received update from stale volume provider");
- return;
+ synchronized (mLock) {
+ if (provider == null || provider != mVolumeProvider) {
+ Log.w(TAG, "Received update from stale volume provider");
+ return;
+ }
}
try {
mBinder.setCurrentVolume(provider.getCurrentVolume());
@@ -537,6 +568,14 @@ public final class MediaSession {
postToCallback(CallbackMessageHandler.MSG_MEDIA_BUTTON, mediaButtonIntent);
}
+ private void dispatchAdjustVolume(int direction) {
+ postToCallback(CallbackMessageHandler.MSG_ADJUST_VOLUME, direction);
+ }
+
+ private void dispatchSetVolumeTo(int volume) {
+ postToCallback(CallbackMessageHandler.MSG_SET_VOLUME, volume);
+ }
+
private void postToCallback(int what) {
postToCallback(what, null);
}
@@ -988,9 +1027,7 @@ public final class MediaSession {
public void onAdjustVolume(int direction) {
MediaSession session = mMediaSession.get();
if (session != null) {
- if (session.mVolumeProvider != null) {
- session.mVolumeProvider.onAdjustVolume(direction);
- }
+ session.dispatchAdjustVolume(direction);
}
}
@@ -998,9 +1035,7 @@ public final class MediaSession {
public void onSetVolumeTo(int value) {
MediaSession session = mMediaSession.get();
if (session != null) {
- if (session.mVolumeProvider != null) {
- session.mVolumeProvider.onSetVolumeTo(value);
- }
+ session.dispatchSetVolumeTo(value);
}
}
@@ -1117,6 +1152,8 @@ public final class MediaSession {
private static final int MSG_CUSTOM_ACTION = 13;
private static final int MSG_MEDIA_BUTTON = 14;
private static final int MSG_COMMAND = 15;
+ private static final int MSG_ADJUST_VOLUME = 16;
+ private static final int MSG_SET_VOLUME = 17;
private MediaSession.Callback mCallback;
@@ -1145,6 +1182,7 @@ public final class MediaSession {
@Override
public void handleMessage(Message msg) {
+ VolumeProvider vp;
switch (msg.what) {
case MSG_PLAY:
mCallback.onPlay();
@@ -1192,6 +1230,22 @@ public final class MediaSession {
Command cmd = (Command) msg.obj;
mCallback.onCommand(cmd.command, cmd.extras, cmd.stub);
break;
+ case MSG_ADJUST_VOLUME:
+ synchronized (mLock) {
+ vp = mVolumeProvider;
+ }
+ if (vp != null) {
+ vp.onAdjustVolume((int) msg.obj);
+ }
+ break;
+ case MSG_SET_VOLUME:
+ synchronized (mLock) {
+ vp = mVolumeProvider;
+ }
+ if (vp != null) {
+ vp.onSetVolumeTo((int) msg.obj);
+ }
+ break;
}
}
}
diff --git a/media/java/android/media/session/MediaSessionLegacyHelper.java b/media/java/android/media/session/MediaSessionLegacyHelper.java
index b37ee6e..7ea269b 100644
--- a/media/java/android/media/session/MediaSessionLegacyHelper.java
+++ b/media/java/android/media/session/MediaSessionLegacyHelper.java
@@ -69,12 +69,9 @@ public class MediaSessionLegacyHelper {
}
public static MediaSessionLegacyHelper getHelper(Context context) {
- if (DEBUG) {
- Log.d(TAG, "Attempting to get helper with context " + context);
- }
synchronized (sLock) {
if (sInstance == null) {
- sInstance = new MediaSessionLegacyHelper(context);
+ sInstance = new MediaSessionLegacyHelper(context.getApplicationContext());
}
}
return sInstance;
@@ -190,6 +187,7 @@ public class MediaSessionLegacyHelper {
boolean down = keyEvent.getAction() == KeyEvent.ACTION_DOWN;
boolean up = keyEvent.getAction() == KeyEvent.ACTION_UP;
int direction = 0;
+ boolean isMute = false;
switch (keyEvent.getKeyCode()) {
case KeyEvent.KEYCODE_VOLUME_UP:
direction = AudioManager.ADJUST_RAISE;
@@ -198,15 +196,11 @@ public class MediaSessionLegacyHelper {
direction = AudioManager.ADJUST_LOWER;
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
- // TODO
+ isMute = true;
break;
}
- if ((down || up) && direction != 0) {
+ if (down || up) {
int flags;
- // If this is action up we want to send a beep for non-music events
- if (up) {
- direction = 0;
- }
if (musicOnly) {
// This flag is used when the screen is off to only affect
// active media
@@ -219,9 +213,23 @@ public class MediaSessionLegacyHelper {
flags = AudioManager.FLAG_SHOW_UI | AudioManager.FLAG_VIBRATE;
}
}
-
- mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
- direction, flags);
+ if (direction != 0) {
+ // If this is action up we want to send a beep for non-music events
+ if (up) {
+ direction = 0;
+ }
+ mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
+ direction, flags);
+ } else if (isMute) {
+ if (down) {
+ // We need to send two volume events on down, one to mute
+ // and one to show the UI
+ mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
+ MediaSessionManager.DIRECTION_MUTE, flags);
+ }
+ mSessionManager.dispatchAdjustVolume(AudioManager.USE_DEFAULT_STREAM_TYPE,
+ 0 /* direction, causes UI to show on down */, flags);
+ }
}
}
diff --git a/media/java/android/media/session/MediaSessionManager.java b/media/java/android/media/session/MediaSessionManager.java
index b4fff8f..a4ef851 100644
--- a/media/java/android/media/session/MediaSessionManager.java
+++ b/media/java/android/media/session/MediaSessionManager.java
@@ -59,6 +59,14 @@ public final class MediaSessionManager {
private Context mContext;
/**
 + * Special direction value used by the system to deliver the mute key
 + * through dispatchAdjustVolume.
+ *
+ * @hide
+ */
+ public static final int DIRECTION_MUTE = -99;
+
+ /**
* @hide
*/
public MediaSessionManager(Context context) {
diff --git a/media/java/android/media/session/PlaybackState.java b/media/java/android/media/session/PlaybackState.java
index 267d1ff..54d0acd 100644
--- a/media/java/android/media/session/PlaybackState.java
+++ b/media/java/android/media/session/PlaybackState.java
@@ -16,6 +16,7 @@
package android.media.session;
import android.annotation.DrawableRes;
+import android.annotation.Nullable;
import android.media.RemoteControlClient;
import android.os.Bundle;
import android.os.Parcel;
@@ -232,11 +233,12 @@ public final class PlaybackState implements Parcelable {
private final CharSequence mErrorMessage;
private final long mUpdateTime;
private final long mActiveItemId;
+ private final Bundle mExtras;
private PlaybackState(int state, long position, long updateTime, float speed,
long bufferedPosition, long transportControls,
List<PlaybackState.CustomAction> customActions, long activeItemId,
- CharSequence error) {
+ CharSequence error, Bundle extras) {
mState = state;
mPosition = position;
mSpeed = speed;
@@ -246,6 +248,7 @@ public final class PlaybackState implements Parcelable {
mCustomActions = new ArrayList<>(customActions);
mActiveItemId = activeItemId;
mErrorMessage = error;
+ mExtras = extras;
}
private PlaybackState(Parcel in) {
@@ -258,7 +261,7 @@ public final class PlaybackState implements Parcelable {
mCustomActions = in.createTypedArrayList(CustomAction.CREATOR);
mActiveItemId = in.readLong();
mErrorMessage = in.readCharSequence();
-
+ mExtras = in.readBundle();
}
@Override
@@ -293,6 +296,7 @@ public final class PlaybackState implements Parcelable {
dest.writeTypedList(mCustomActions);
dest.writeLong(mActiveItemId);
dest.writeCharSequence(mErrorMessage);
+ dest.writeBundle(mExtras);
}
/**
@@ -306,6 +310,7 @@ public final class PlaybackState implements Parcelable {
* <li> {@link PlaybackState#STATE_REWINDING}</li>
* <li> {@link PlaybackState#STATE_BUFFERING}</li>
* <li> {@link PlaybackState#STATE_ERROR}</li>
+ * </ul>
*/
public int getState() {
return mState;
@@ -394,6 +399,15 @@ public final class PlaybackState implements Parcelable {
}
/**
+ * Get any custom extras that were set on this playback state.
+ *
+ * @return The extras for this state or null.
+ */
+ public @Nullable Bundle getExtras() {
+ return mExtras;
+ }
+
+ /**
* Get the {@link PlaybackState} state for the given
* {@link RemoteControlClient} state.
*
@@ -737,6 +751,7 @@ public final class PlaybackState implements Parcelable {
private CharSequence mErrorMessage;
private long mUpdateTime;
private long mActiveItemId = MediaSession.QueueItem.UNKNOWN_ID;
+ private Bundle mExtras;
/**
* Creates an initially empty state builder.
@@ -765,6 +780,7 @@ public final class PlaybackState implements Parcelable {
mErrorMessage = from.mErrorMessage;
mUpdateTime = from.mUpdateTime;
mActiveItemId = from.mActiveItemId;
+ mExtras = from.mExtras;
}
/**
@@ -947,13 +963,25 @@ public final class PlaybackState implements Parcelable {
}
/**
- * Build and return the {@link PlaybackState} instance with these values.
+ * Set any custom extras to be included with the playback state.
+ *
+ * @param extras The extras to include.
+ * @return this
+ */
+ public Builder setExtras(Bundle extras) {
+ mExtras = extras;
+ return this;
+ }
+
+ /**
+ * Build and return the {@link PlaybackState} instance with these
+ * values.
*
* @return A new state instance.
*/
public PlaybackState build() {
return new PlaybackState(mState, mPosition, mUpdateTime, mSpeed, mBufferedPosition,
- mActions, mCustomActions, mActiveItemId, mErrorMessage);
+ mActions, mCustomActions, mActiveItemId, mErrorMessage, mExtras);
}
}
}
diff --git a/media/java/android/media/tv/ITvInputManager.aidl b/media/java/android/media/tv/ITvInputManager.aidl
index 6ca794e..21549c9 100644
--- a/media/java/android/media/tv/ITvInputManager.aidl
+++ b/media/java/android/media/tv/ITvInputManager.aidl
@@ -38,6 +38,7 @@ import android.view.Surface;
interface ITvInputManager {
List<TvInputInfo> getTvInputList(int userId);
TvInputInfo getTvInputInfo(in String inputId, int userId);
+ int getTvInputState(in String inputId, int userId);
List<TvContentRatingSystemInfo> getTvContentRatingSystemList(int userId);
diff --git a/media/java/android/media/tv/ITvInputSessionWrapper.java b/media/java/android/media/tv/ITvInputSessionWrapper.java
index b8cdc4b..94c9690 100644
--- a/media/java/android/media/tv/ITvInputSessionWrapper.java
+++ b/media/java/android/media/tv/ITvInputSessionWrapper.java
@@ -42,6 +42,7 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
private static final String TAG = "TvInputSessionWrapper";
private static final int MESSAGE_HANDLING_DURATION_THRESHOLD_MILLIS = 50;
+ private static final int MESSAGE_TUNE_DURATION_THRESHOLD_MILLIS = 2000;
private static final int DO_RELEASE = 1;
private static final int DO_SET_MAIN = 2;
@@ -161,11 +162,17 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
if (duration > MESSAGE_HANDLING_DURATION_THRESHOLD_MILLIS) {
Log.w(TAG, "Handling message (" + msg.what + ") took too long time (duration="
+ duration + "ms)");
+ if (msg.what == DO_TUNE && duration > MESSAGE_TUNE_DURATION_THRESHOLD_MILLIS) {
+ throw new RuntimeException("Too much time to handle tune request. (" + duration
+ + "ms > " + MESSAGE_TUNE_DURATION_THRESHOLD_MILLIS + "ms) "
+ + "Consider handling the tune request in a separate thread.");
+ }
}
}
@Override
public void release() {
+ mTvInputSessionImpl.scheduleOverlayViewCleanup();
mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_RELEASE));
}
@@ -192,6 +199,8 @@ public class ITvInputSessionWrapper extends ITvInputSession.Stub implements Hand
@Override
public void tune(Uri channelUri, Bundle params) {
+ // Clear the pending tune requests.
+ mCaller.removeMessages(DO_TUNE);
mCaller.executeOrSendMessage(mCaller.obtainMessageOO(DO_TUNE, channelUri, params));
}
diff --git a/media/java/android/media/tv/TvContract.java b/media/java/android/media/tv/TvContract.java
index b3890d4..5b92266 100644
--- a/media/java/android/media/tv/TvContract.java
+++ b/media/java/android/media/tv/TvContract.java
@@ -856,6 +856,9 @@ public final class TvContract {
/**
* The start time of this TV program, in milliseconds since the epoch.
* <p>
+ * The value should be equal to or larger than {@link #COLUMN_END_TIME_UTC_MILLIS} of the
+ * previous program in the same channel.
+ * </p><p>
* Type: INTEGER (long)
* </p>
*/
@@ -864,6 +867,9 @@ public final class TvContract {
/**
* The end time of this TV program, in milliseconds since the epoch.
* <p>
+ * The value should be equal to or less than {@link #COLUMN_START_TIME_UTC_MILLIS} of the
+ * next program in the same channel.
+ * </p><p>
* Type: INTEGER (long)
* </p>
*/
@@ -1052,6 +1058,24 @@ public final class TvContract {
/** The genre for Gaming. */
public static final String GAMING = "GAMING";
+ /** The genre for Arts. */
+ public static final String ARTS = "ARTS";
+
+ /** The genre for Entertainment. */
+ public static final String ENTERTAINMENT = "ENTERTAINMENT";
+
+ /** The genre for Life Style. */
+ public static final String LIFE_STYLE = "LIFE_STYLE";
+
+ /** The genre for Music. */
+ public static final String MUSIC = "MUSIC";
+
+ /** The genre for Premier. */
+ public static final String PREMIER = "PREMIER";
+
+ /** The genre for Tech/Science. */
+ public static final String TECH_SCIENCE = "TECH_SCIENCE";
+
private static final ArraySet<String> CANONICAL_GENRES = new ArraySet<String>();
static {
CANONICAL_GENRES.add(FAMILY_KIDS);
@@ -1065,6 +1089,12 @@ public final class TvContract {
CANONICAL_GENRES.add(ANIMAL_WILDLIFE);
CANONICAL_GENRES.add(NEWS);
CANONICAL_GENRES.add(GAMING);
+ CANONICAL_GENRES.add(ARTS);
+ CANONICAL_GENRES.add(ENTERTAINMENT);
+ CANONICAL_GENRES.add(LIFE_STYLE);
+ CANONICAL_GENRES.add(MUSIC);
+ CANONICAL_GENRES.add(PREMIER);
+ CANONICAL_GENRES.add(TECH_SCIENCE);
}
private Genres() {}
diff --git a/media/java/android/media/tv/TvInputInfo.java b/media/java/android/media/tv/TvInputInfo.java
index 54ebc6a..b9e99d2 100644
--- a/media/java/android/media/tv/TvInputInfo.java
+++ b/media/java/android/media/tv/TvInputInfo.java
@@ -241,6 +241,9 @@ public final class TvInputInfo implements Parcelable {
if (DEBUG) {
Log.d(TAG, "Setup activity loaded. [" + input.mSetupActivity + "] for " + si.name);
}
+ if (inputType == TYPE_TUNER && TextUtils.isEmpty(input.mSetupActivity)) {
+ throw new XmlPullParserException("Setup activity not found in " + si.name);
+ }
input.mSettingsActivity = sa.getString(
com.android.internal.R.styleable.TvInputService_settingsActivity);
if (DEBUG) {
diff --git a/media/java/android/media/tv/TvInputManager.java b/media/java/android/media/tv/TvInputManager.java
index 78714d2..f55299e 100644
--- a/media/java/android/media/tv/TvInputManager.java
+++ b/media/java/android/media/tv/TvInputManager.java
@@ -72,6 +72,17 @@ public final class TvInputManager {
public static final int VIDEO_UNAVAILABLE_REASON_BUFFERING = VIDEO_UNAVAILABLE_REASON_END;
/**
+ * The TV input is in unknown state.
+ * <p>
+ * State for denoting unknown TV input state. The typical use case is when a requested TV
+ * input is removed from the device or it is not registered. Used in
+ * {@code ITvInputManager.getTvInputState()}.
+ * </p>
+ * @hide
+ */
+ public static final int INPUT_STATE_UNKNOWN = -1;
+
+ /**
* The TV input is connected.
* <p>
* State for {@link #getInputState} and {@link
@@ -127,10 +138,10 @@ public final class TvInputManager {
* <receiver android:name=".TvInputReceiver">
* <intent-filter>
* <action android:name=
- * "android.media.tv.TvInputManager.ACTION_QUERY_CONTENT_RATING_SYSTEMS" />
+ * "android.media.tv.action.QUERY_CONTENT_RATING_SYSTEMS" />
* </intent-filter>
* <meta-data
- * android:name="android.media.tv.TvInputManager.META_DATA_CONTENT_RATING_SYSTEMS"
+ * android:name="android.media.tv.metadata.CONTENT_RATING_SYSTEMS"
* android:resource="@xml/tv_content_rating_systems" />
* </receiver>}</pre></p>
* In the above example, the <code>@xml/tv_content_rating_systems</code> resource refers to an
@@ -159,12 +170,12 @@ public final class TvInputManager {
private final Object mLock = new Object();
- // @GuardedBy(mLock)
+ // @GuardedBy("mLock")
private final List<TvInputCallbackRecord> mCallbackRecords =
new LinkedList<TvInputCallbackRecord>();
// A mapping from TV input ID to the state of corresponding input.
- // @GuardedBy(mLock)
+ // @GuardedBy("mLock")
private final Map<String, Integer> mStateMap = new ArrayMap<String, Integer>();
// A mapping from the sequence number of a session to its SessionCallbackRecord.
@@ -207,7 +218,7 @@ public final class TvInputManager {
/**
* This is called when the channel of this session is changed by the underlying TV input
- * with out any {@link TvInputManager.Session#tune(Uri)} request.
+ * without any {@link TvInputManager.Session#tune(Uri)} request.
*
* @param session A {@link TvInputManager.Session} associated with this callback.
* @param channelUri The URI of a channel.
@@ -227,7 +238,7 @@ public final class TvInputManager {
/**
* This is called when a track for a given type is selected.
*
- * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param session A {@link TvInputManager.Session} associated with this callback.
* @param type The type of the selected track. The type can be
* {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
* {@link TvTrackInfo#TYPE_SUBTITLE}.
@@ -238,6 +249,18 @@ public final class TvInputManager {
}
/**
 + * This is invoked when the video size has been changed. It is also called the first
 + * time video size information becomes available after the session is tuned to a specific
 + * channel.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param width The width of the video.
+ * @param height The height of the video.
+ */
+ public void onVideoSizeChanged(Session session, int width, int height) {
+ }
+
+ /**
* This is called when the video is available, so the TV input starts the playback.
*
* @param session A {@link TvInputManager.Session} associated with this callback.
@@ -312,13 +335,13 @@ public final class TvInputManager {
private final Handler mHandler;
private Session mSession;
- public SessionCallbackRecord(SessionCallback sessionCallback,
+ SessionCallbackRecord(SessionCallback sessionCallback,
Handler handler) {
mSessionCallback = sessionCallback;
mHandler = handler;
}
- public void postSessionCreated(final Session session) {
+ void postSessionCreated(final Session session) {
mSession = session;
mHandler.post(new Runnable() {
@Override
@@ -328,7 +351,7 @@ public final class TvInputManager {
});
}
- public void postSessionReleased() {
+ void postSessionReleased() {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -337,7 +360,7 @@ public final class TvInputManager {
});
}
- public void postChannelRetuned(final Uri channelUri) {
+ void postChannelRetuned(final Uri channelUri) {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -346,49 +369,34 @@ public final class TvInputManager {
});
}
- public void postTracksChanged(final List<TvTrackInfo> tracks) {
+ void postTracksChanged(final List<TvTrackInfo> tracks) {
mHandler.post(new Runnable() {
@Override
public void run() {
- mSession.mAudioTracks.clear();
- mSession.mVideoTracks.clear();
- mSession.mSubtitleTracks.clear();
- for (TvTrackInfo track : tracks) {
- if (track.getType() == TvTrackInfo.TYPE_AUDIO) {
- mSession.mAudioTracks.add(track);
- } else if (track.getType() == TvTrackInfo.TYPE_VIDEO) {
- mSession.mVideoTracks.add(track);
- } else if (track.getType() == TvTrackInfo.TYPE_SUBTITLE) {
- mSession.mSubtitleTracks.add(track);
- } else {
- // Silently ignore.
- }
- }
mSessionCallback.onTracksChanged(mSession, tracks);
}
});
}
- public void postTrackSelected(final int type, final String trackId) {
+ void postTrackSelected(final int type, final String trackId) {
mHandler.post(new Runnable() {
@Override
public void run() {
- if (type == TvTrackInfo.TYPE_AUDIO) {
- mSession.mSelectedAudioTrackId = trackId;
- } else if (type == TvTrackInfo.TYPE_VIDEO) {
- mSession.mSelectedVideoTrackId = trackId;
- } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
- mSession.mSelectedSubtitleTrackId = trackId;
- } else {
- // Silently ignore.
- return;
- }
mSessionCallback.onTrackSelected(mSession, type, trackId);
}
});
}
- public void postVideoAvailable() {
+ void postVideoSizeChanged(final int width, final int height) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onVideoSizeChanged(mSession, width, height);
+ }
+ });
+ }
+
+ void postVideoAvailable() {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -397,7 +405,7 @@ public final class TvInputManager {
});
}
- public void postVideoUnavailable(final int reason) {
+ void postVideoUnavailable(final int reason) {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -406,7 +414,7 @@ public final class TvInputManager {
});
}
- public void postContentAllowed() {
+ void postContentAllowed() {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -415,7 +423,7 @@ public final class TvInputManager {
});
}
- public void postContentBlocked(final TvContentRating rating) {
+ void postContentBlocked(final TvContentRating rating) {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -424,7 +432,7 @@ public final class TvInputManager {
});
}
- public void postLayoutSurface(final int left, final int top, final int right,
+ void postLayoutSurface(final int left, final int top, final int right,
final int bottom) {
mHandler.post(new Runnable() {
@Override
@@ -434,7 +442,7 @@ public final class TvInputManager {
});
}
- public void postSessionEvent(final String eventType, final Bundle eventArgs) {
+ void postSessionEvent(final String eventType, final Bundle eventArgs) {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -610,7 +618,10 @@ public final class TvInputManager {
Log.e(TAG, "Callback not found for seq " + seq);
return;
}
- record.postTracksChanged(tracks);
+ if (record.mSession.updateTracks(tracks)) {
+ record.postTracksChanged(tracks);
+ postVideoSizeChangedIfNeededLocked(record);
+ }
}
}
@@ -622,7 +633,17 @@ public final class TvInputManager {
Log.e(TAG, "Callback not found for seq " + seq);
return;
}
- record.postTrackSelected(type, trackId);
+ if (record.mSession.updateTrackSelection(type, trackId)) {
+ record.postTrackSelected(type, trackId);
+ postVideoSizeChangedIfNeededLocked(record);
+ }
+ }
+ }
+
+ private void postVideoSizeChangedIfNeededLocked(SessionCallbackRecord record) {
+ TvTrackInfo track = record.mSession.getVideoTrackToNotify();
+ if (track != null) {
+ record.postVideoSizeChanged(track.getVideoWidth(), track.getVideoHeight());
}
}
@@ -741,9 +762,19 @@ public final class TvInputManager {
try {
if (mService != null) {
mService.registerCallback(mManagerCallback, mUserId);
+ List<TvInputInfo> infos = mService.getTvInputList(mUserId);
+ synchronized (mLock) {
+ for (TvInputInfo info : infos) {
+ String inputId = info.getId();
+ int state = mService.getTvInputState(inputId, mUserId);
+ if (state != INPUT_STATE_UNKNOWN) {
+ mStateMap.put(inputId, state);
+ }
+ }
+ }
}
} catch (RemoteException e) {
- Log.e(TAG, "mService.registerCallback failed: " + e);
+ Log.e(TAG, "TvInputManager initialization failed: " + e);
}
}
@@ -778,7 +809,7 @@ public final class TvInputManager {
}
/**
- * Returns the state of a given TV input. It retuns one of the following:
+ * Returns the state of a given TV input. It returns one of the following:
* <ul>
* <li>{@link #INPUT_STATE_CONNECTED}
* <li>{@link #INPUT_STATE_CONNECTED_STANDBY}
@@ -1002,6 +1033,12 @@ public final class TvInputManager {
/**
* Returns the TvStreamConfig list of the given TV input.
*
 + * If you are using a {@link Hardware} object obtained from {@link
 + * #acquireTvInputHardware}, you should get the list of available streams
+ * from {@link HardwareCallback#onStreamConfigChanged} method, not from
+ * here. This method is designed to be used with {@link #captureFrame} in
+ * capture scenarios specifically and not suitable for any other use.
+ *
* @param inputId the id of the TV input.
* @return List of {@link TvStreamConfig} which is available for capturing
* of the given TV input.
@@ -1133,12 +1170,24 @@ public final class TvInputManager {
private IBinder mToken;
private TvInputEventSender mSender;
private InputChannel mChannel;
+
+ private final Object mTrackLock = new Object();
+ // @GuardedBy("mTrackLock")
private final List<TvTrackInfo> mAudioTracks = new ArrayList<TvTrackInfo>();
+ // @GuardedBy("mTrackLock")
private final List<TvTrackInfo> mVideoTracks = new ArrayList<TvTrackInfo>();
+ // @GuardedBy("mTrackLock")
private final List<TvTrackInfo> mSubtitleTracks = new ArrayList<TvTrackInfo>();
+ // @GuardedBy("mTrackLock")
private String mSelectedAudioTrackId;
+ // @GuardedBy("mTrackLock")
private String mSelectedVideoTrackId;
+ // @GuardedBy("mTrackLock")
private String mSelectedSubtitleTrackId;
+ // @GuardedBy("mTrackLock")
+ private int mVideoWidth;
+ // @GuardedBy("mTrackLock")
+ private int mVideoHeight;
private Session(IBinder token, InputChannel channel, ITvInputManager service, int userId,
int seq, SparseArray<SessionCallbackRecord> sessionCallbackRecordMap) {
@@ -1273,12 +1322,16 @@ public final class TvInputManager {
Log.w(TAG, "The session has been already released");
return;
}
- mAudioTracks.clear();
- mVideoTracks.clear();
- mSubtitleTracks.clear();
- mSelectedAudioTrackId = null;
- mSelectedVideoTrackId = null;
- mSelectedSubtitleTrackId = null;
+ synchronized (mTrackLock) {
+ mAudioTracks.clear();
+ mVideoTracks.clear();
+ mSubtitleTracks.clear();
+ mSelectedAudioTrackId = null;
+ mSelectedVideoTrackId = null;
+ mSelectedSubtitleTrackId = null;
+ mVideoWidth = 0;
+ mVideoHeight = 0;
+ }
try {
mService.tune(mToken, channelUri, params, mUserId);
} catch (RemoteException e) {
@@ -1314,23 +1367,25 @@ public final class TvInputManager {
* @see #getTracks
*/
public void selectTrack(int type, String trackId) {
- if (type == TvTrackInfo.TYPE_AUDIO) {
- if (trackId != null && !containsTrack(mAudioTracks, trackId)) {
- Log.w(TAG, "Invalid audio trackId: " + trackId);
- return;
- }
- } else if (type == TvTrackInfo.TYPE_VIDEO) {
- if (trackId != null && !containsTrack(mVideoTracks, trackId)) {
- Log.w(TAG, "Invalid video trackId: " + trackId);
- return;
- }
- } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
- if (trackId != null && !containsTrack(mSubtitleTracks, trackId)) {
- Log.w(TAG, "Invalid subtitle trackId: " + trackId);
- return;
+ synchronized (mTrackLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO) {
+ if (trackId != null && !containsTrack(mAudioTracks, trackId)) {
+ Log.w(TAG, "Invalid audio trackId: " + trackId);
+ return;
+ }
+ } else if (type == TvTrackInfo.TYPE_VIDEO) {
+ if (trackId != null && !containsTrack(mVideoTracks, trackId)) {
+ Log.w(TAG, "Invalid video trackId: " + trackId);
+ return;
+ }
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
+ if (trackId != null && !containsTrack(mSubtitleTracks, trackId)) {
+ Log.w(TAG, "Invalid subtitle trackId: " + trackId);
+ return;
+ }
+ } else {
+ throw new IllegalArgumentException("invalid type: " + type);
}
- } else {
- throw new IllegalArgumentException("invalid type: " + type);
}
if (mToken == null) {
Log.w(TAG, "The session has been already released");
@@ -1361,21 +1416,23 @@ public final class TvInputManager {
* @return the list of tracks for the given type.
*/
public List<TvTrackInfo> getTracks(int type) {
- if (type == TvTrackInfo.TYPE_AUDIO) {
- if (mAudioTracks == null) {
- return null;
- }
- return mAudioTracks;
- } else if (type == TvTrackInfo.TYPE_VIDEO) {
- if (mVideoTracks == null) {
- return null;
- }
- return mVideoTracks;
- } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
- if (mSubtitleTracks == null) {
- return null;
+ synchronized (mTrackLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO) {
+ if (mAudioTracks == null) {
+ return null;
+ }
+ return new ArrayList<TvTrackInfo>(mAudioTracks);
+ } else if (type == TvTrackInfo.TYPE_VIDEO) {
+ if (mVideoTracks == null) {
+ return null;
+ }
+ return new ArrayList<TvTrackInfo>(mVideoTracks);
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
+ if (mSubtitleTracks == null) {
+ return null;
+ }
+ return new ArrayList<TvTrackInfo>(mSubtitleTracks);
}
- return mSubtitleTracks;
}
throw new IllegalArgumentException("invalid type: " + type);
}
@@ -1388,17 +1445,89 @@ public final class TvInputManager {
* @see #selectTrack
*/
public String getSelectedTrack(int type) {
- if (type == TvTrackInfo.TYPE_AUDIO) {
- return mSelectedAudioTrackId;
- } else if (type == TvTrackInfo.TYPE_VIDEO) {
- return mSelectedVideoTrackId;
- } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
- return mSelectedSubtitleTrackId;
+ synchronized (mTrackLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO) {
+ return mSelectedAudioTrackId;
+ } else if (type == TvTrackInfo.TYPE_VIDEO) {
+ return mSelectedVideoTrackId;
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
+ return mSelectedSubtitleTrackId;
+ }
}
throw new IllegalArgumentException("invalid type: " + type);
}
/**
+ * Responds to onTracksChanged() and updates the internal track information. Returns true if
+ * there is an update.
+ */
+ boolean updateTracks(List<TvTrackInfo> tracks) {
+ synchronized (mTrackLock) {
+ mAudioTracks.clear();
+ mVideoTracks.clear();
+ mSubtitleTracks.clear();
+ for (TvTrackInfo track : tracks) {
+ if (track.getType() == TvTrackInfo.TYPE_AUDIO) {
+ mAudioTracks.add(track);
+ } else if (track.getType() == TvTrackInfo.TYPE_VIDEO) {
+ mVideoTracks.add(track);
+ } else if (track.getType() == TvTrackInfo.TYPE_SUBTITLE) {
+ mSubtitleTracks.add(track);
+ }
+ }
+ return !mAudioTracks.isEmpty() || !mVideoTracks.isEmpty()
+ || !mSubtitleTracks.isEmpty();
+ }
+ }
+
+ /**
+ * Responds to onTrackSelected() and updates the internal track selection information.
+ * Returns true if there is an update.
+ */
+ boolean updateTrackSelection(int type, String trackId) {
+ synchronized (mTrackLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO && trackId != mSelectedAudioTrackId) {
+ mSelectedAudioTrackId = trackId;
+ return true;
+ } else if (type == TvTrackInfo.TYPE_VIDEO && trackId != mSelectedVideoTrackId) {
+ mSelectedVideoTrackId = trackId;
+ return true;
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE
+ && trackId != mSelectedSubtitleTrackId) {
+ mSelectedSubtitleTrackId = trackId;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Returns the new/updated video track that contains new video size information. Returns
+ * null if there is no video track to notify. Subsequent calls of this method result in a
+ * non-null video track returned only by the first call and null returned by following
+ * calls. The caller should immediately notify of the video size change upon receiving the
+ * track.
+ */
+ TvTrackInfo getVideoTrackToNotify() {
+ synchronized (mTrackLock) {
+ if (!mVideoTracks.isEmpty() && mSelectedVideoTrackId != null) {
+ for (TvTrackInfo track : mVideoTracks) {
+ if (track.getId().equals(mSelectedVideoTrackId)) {
+ int videoWidth = track.getVideoWidth();
+ int videoHeight = track.getVideoHeight();
+ if (mVideoWidth != videoWidth || mVideoHeight != videoHeight) {
+ mVideoWidth = videoWidth;
+ mVideoHeight = videoHeight;
+ return track;
+ }
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
* Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
* TvInputService.Session.appPrivateCommand()} on the current TvView.
*
diff --git a/media/java/android/media/tv/TvInputService.java b/media/java/android/media/tv/TvInputService.java
index 4f8facb..b19a1fb 100644
--- a/media/java/android/media/tv/TvInputService.java
+++ b/media/java/android/media/tv/TvInputService.java
@@ -25,10 +25,12 @@ import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.hardware.hdmi.HdmiDeviceInfo;
import android.net.Uri;
+import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
+import android.os.Process;
import android.os.RemoteCallbackList;
import android.os.RemoteException;
import android.text.TextUtils;
@@ -44,10 +46,12 @@ import android.view.Surface;
import android.view.View;
import android.view.WindowManager;
import android.view.accessibility.CaptioningManager;
+import android.widget.FrameLayout;
import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.os.SomeArgs;
+import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -155,15 +159,6 @@ public abstract class TvInputService extends Service {
}
/**
- * Get the number of callbacks that are registered.
- * @hide
- */
- @VisibleForTesting
- public final int getRegisteredCallbackCount() {
- return mCallbacks.getRegisteredCallbackCount();
- }
-
- /**
* Returns a concrete implementation of {@link Session}.
* <p>
* May return {@code null} if this TV input service fails to create a session for some reason.
@@ -241,16 +236,25 @@ public abstract class TvInputService extends Service {
* Base class for derived classes to implement to provide a TV input session.
*/
public abstract static class Session implements KeyEvent.Callback {
+ private static final int DETACH_OVERLAY_VIEW_TIMEOUT = 5000;
private final KeyEvent.DispatcherState mDispatcherState = new KeyEvent.DispatcherState();
private final WindowManager mWindowManager;
final Handler mHandler;
private WindowManager.LayoutParams mWindowParams;
private Surface mSurface;
+ private Context mContext;
+ private FrameLayout mOverlayViewContainer;
private View mOverlayView;
+ private OverlayViewCleanUpTask mOverlayViewCleanUpTask;
private boolean mOverlayViewEnabled;
private IBinder mWindowToken;
private Rect mOverlayFrame;
+
+ private Object mLock = new Object();
+ // @GuardedBy("mLock")
private ITvInputSessionCallback mSessionCallback;
+ // @GuardedBy("mLock")
+ private List<Runnable> mPendingActions = new ArrayList<>();
/**
* Creates a new Session.
@@ -258,6 +262,7 @@ public abstract class TvInputService extends Service {
* @param context The context of the application
*/
public Session(Context context) {
+ mContext = context;
mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
mHandler = new Handler(context.getMainLooper());
}
@@ -295,11 +300,12 @@ public abstract class TvInputService extends Service {
* @param eventArgs Optional arguments of the event.
* @hide
*/
+ @SystemApi
public void notifySessionEvent(final String eventType, final Bundle eventArgs) {
if (eventType == null) {
throw new IllegalArgumentException("eventType should not be null.");
}
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -318,7 +324,7 @@ public abstract class TvInputService extends Service {
* @param channelUri The URI of a channel.
*/
public void notifyChannelRetuned(final Uri channelUri) {
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -332,8 +338,13 @@ public abstract class TvInputService extends Service {
}
/**
- * Sends the change on the track information. This is expected to be called whenever a track
- * is added/removed and the metadata of a track is modified.
+ * Sends the list of all audio/video/subtitle tracks. This is used by the framework to
+ * maintain the track information for a given session, which in turn is used by
+ * {@link TvView#getTracks} for the application to retrieve metadata for a given track type.
+ * The TV input service must call this method as soon as the track information becomes
+ * available or is updated. Note that in a case where a part of the information for a
+ * certain track is updated, it is not necessary to create a new {@link TvTrackInfo} object
+ * with a different track ID.
*
* @param tracks A list which includes track information.
* @throws IllegalArgumentException if {@code tracks} contains redundant tracks.
@@ -350,7 +361,7 @@ public abstract class TvInputService extends Service {
trackIdSet.clear();
// TODO: Validate the track list.
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -364,8 +375,12 @@ public abstract class TvInputService extends Service {
}
/**
- * Sends the ID of the selected track for a given track type. This is expected to be called
- * whenever there is a change on track selection.
+ * Sends the type and ID of a selected track. This is used to inform the application that a
+ * specific track is selected. The TV input service must call this method as soon as a track
+ * is selected either by default or in response to a call to {@link #onSelectTrack}. The
+ * selected track ID for a given type is maintained in the framework until the next call to
+ * this method even after the entire track list is updated (but is reset when the session is
+ * tuned to a new channel), so care must be taken not to result in an obsolete track ID.
*
* @param type The type of the selected track. The type can be
* {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
@@ -374,7 +389,7 @@ public abstract class TvInputService extends Service {
* @see #onSelectTrack
*/
public void notifyTrackSelected(final int type, final String trackId) {
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -395,7 +410,7 @@ public abstract class TvInputService extends Service {
* @see #notifyVideoUnavailable
*/
public void notifyVideoAvailable() {
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -427,7 +442,7 @@ public abstract class TvInputService extends Service {
|| reason > TvInputManager.VIDEO_UNAVAILABLE_REASON_END) {
throw new IllegalArgumentException("Unknown reason: " + reason);
}
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -466,7 +481,7 @@ public abstract class TvInputService extends Service {
* @see TvInputManager
*/
public void notifyContentAllowed() {
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -506,7 +521,7 @@ public abstract class TvInputService extends Service {
* @see TvInputManager
*/
public void notifyContentBlocked(final TvContentRating rating) {
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
@@ -526,22 +541,23 @@ public abstract class TvInputService extends Service {
* @param left Left position in pixels, relative to the overlay view.
* @param top Top position in pixels, relative to the overlay view.
* @param right Right position in pixels, relative to the overlay view.
- * @param bottm Bottom position in pixels, relative to the overlay view.
+ * @param bottom Bottom position in pixels, relative to the overlay view.
* @see #onOverlayViewSizeChanged
* @hide
*/
@SystemApi
- public void layoutSurface(final int left, final int top, final int right, final int bottm) {
- if (left > right || top > bottm) {
+ public void layoutSurface(final int left, final int top, final int right,
+ final int bottom) {
+ if (left > right || top > bottom) {
throw new IllegalArgumentException("Invalid parameter");
}
- runOnMainThread(new Runnable() {
+ executeOrPostRunnable(new Runnable() {
@Override
public void run() {
try {
if (DEBUG) Log.d(TAG, "layoutSurface (l=" + left + ", t=" + top + ", r="
- + right + ", b=" + bottm + ",)");
- mSessionCallback.onLayoutSurface(left, top, right, bottm);
+ + right + ", b=" + bottom + ",)");
+ mSessionCallback.onLayoutSurface(left, top, right, bottom);
} catch (RemoteException e) {
Log.w(TAG, "error in layoutSurface");
}
@@ -837,12 +853,18 @@ public abstract class TvInputService extends Service {
* session.
*/
void release() {
- removeOverlayView(true);
onRelease();
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
+ synchronized(mLock) {
+ mSessionCallback = null;
+ mPendingActions.clear();
+ }
+ // Removes the overlay view last so that any hanging on the main thread can be handled
+ // in {@link #scheduleOverlayViewCleanup}.
+ removeOverlayView(true);
}
/**
@@ -927,9 +949,8 @@ public abstract class TvInputService extends Service {
* @param frame A position of the overlay view.
*/
void createOverlayView(IBinder windowToken, Rect frame) {
- if (mOverlayView != null) {
- mWindowManager.removeView(mOverlayView);
- mOverlayView = null;
+ if (mOverlayViewContainer != null) {
+ removeOverlayView(false);
}
if (DEBUG) Log.d(TAG, "create overlay view(" + frame + ")");
mWindowToken = windowToken;
@@ -942,6 +963,15 @@ public abstract class TvInputService extends Service {
if (mOverlayView == null) {
return;
}
+ if (mOverlayViewCleanUpTask != null) {
+ mOverlayViewCleanUpTask.cancel(true);
+ mOverlayViewCleanUpTask = null;
+ }
+ // Creates a container view to check hanging on the overlay view detaching.
+ // Adding/removing the overlay view to/from the container makes the view attach/detach
+ // logic run on the main thread.
+ mOverlayViewContainer = new FrameLayout(mContext);
+ mOverlayViewContainer.addView(mOverlayView);
// TvView's window type is TYPE_APPLICATION_MEDIA and we want to create
// an overlay window above the media window but below the application window.
int type = WindowManager.LayoutParams.TYPE_APPLICATION_MEDIA_OVERLAY;
@@ -958,7 +988,7 @@ public abstract class TvInputService extends Service {
WindowManager.LayoutParams.PRIVATE_FLAG_NO_MOVE_ANIMATION;
mWindowParams.gravity = Gravity.START | Gravity.TOP;
mWindowParams.token = windowToken;
- mWindowManager.addView(mOverlayView, mWindowParams);
+ mWindowManager.addView(mOverlayViewContainer, mWindowParams);
}
/**
@@ -975,33 +1005,51 @@ public abstract class TvInputService extends Service {
onOverlayViewSizeChanged(frame.right - frame.left, frame.bottom - frame.top);
}
mOverlayFrame = frame;
- if (!mOverlayViewEnabled || mOverlayView == null) {
+ if (!mOverlayViewEnabled || mOverlayViewContainer == null) {
return;
}
mWindowParams.x = frame.left;
mWindowParams.y = frame.top;
mWindowParams.width = frame.right - frame.left;
mWindowParams.height = frame.bottom - frame.top;
- mWindowManager.updateViewLayout(mOverlayView, mWindowParams);
+ mWindowManager.updateViewLayout(mOverlayViewContainer, mWindowParams);
}
/**
* Removes the current overlay view.
*/
void removeOverlayView(boolean clearWindowToken) {
- if (DEBUG) Log.d(TAG, "removeOverlayView(" + mOverlayView + ")");
+ if (DEBUG) Log.d(TAG, "removeOverlayView(" + mOverlayViewContainer + ")");
if (clearWindowToken) {
mWindowToken = null;
mOverlayFrame = null;
}
- if (mOverlayView != null) {
- mWindowManager.removeView(mOverlayView);
+ if (mOverlayViewContainer != null) {
+ // Removes the overlay view from the view hierarchy in advance so that it can be
+ // cleaned up in the {@link OverlayViewCleanUpTask} if the remove process is
+ // hanging.
+ mOverlayViewContainer.removeView(mOverlayView);
mOverlayView = null;
+ mWindowManager.removeView(mOverlayViewContainer);
+ mOverlayViewContainer = null;
mWindowParams = null;
}
}
/**
+ * Schedules a task which checks whether the overlay view is detached and kills the process
+ * if it is not. Note that this method is expected to be called in a non-main thread.
+ */
+ void scheduleOverlayViewCleanup() {
+ View overlayViewParent = mOverlayViewContainer;
+ if (overlayViewParent != null) {
+ mOverlayViewCleanUpTask = new OverlayViewCleanUpTask();
+ mOverlayViewCleanUpTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR,
+ overlayViewParent);
+ }
+ }
+
+ /**
* Takes care of dispatching incoming input events and tells whether the event was handled.
*/
int dispatchInputEvent(InputEvent event, InputEventReceiver receiver) {
@@ -1030,46 +1078,89 @@ public abstract class TvInputService extends Service {
}
}
}
- if (mOverlayView == null || !mOverlayView.isAttachedToWindow()) {
+ if (mOverlayViewContainer == null || !mOverlayViewContainer.isAttachedToWindow()) {
return TvInputManager.Session.DISPATCH_NOT_HANDLED;
}
- if (!mOverlayView.hasWindowFocus()) {
- mOverlayView.getViewRootImpl().windowFocusChanged(true, true);
+ if (!mOverlayViewContainer.hasWindowFocus()) {
+ mOverlayViewContainer.getViewRootImpl().windowFocusChanged(true, true);
}
- if (isNavigationKey && mOverlayView.hasFocusable()) {
+ if (isNavigationKey && mOverlayViewContainer.hasFocusable()) {
// If mOverlayView has focusable views, navigation key events should be always
// handled. If not, it can make the application UI navigation messed up.
// For example, in the case that the left-most view is focused, a left key event
// will not be handled in ViewRootImpl. Then, the left key event will be handled in
// the application during the UI navigation of the TV input.
- mOverlayView.getViewRootImpl().dispatchInputEvent(event);
+ mOverlayViewContainer.getViewRootImpl().dispatchInputEvent(event);
return TvInputManager.Session.DISPATCH_HANDLED;
} else {
- mOverlayView.getViewRootImpl().dispatchInputEvent(event, receiver);
+ mOverlayViewContainer.getViewRootImpl().dispatchInputEvent(event, receiver);
return TvInputManager.Session.DISPATCH_IN_PROGRESS;
}
}
- private void setSessionCallback(ITvInputSessionCallback callback) {
- mSessionCallback = callback;
+ private void initialize(ITvInputSessionCallback callback) {
+ synchronized(mLock) {
+ mSessionCallback = callback;
+ for (Runnable runnable : mPendingActions) {
+ runnable.run();
+ }
+ mPendingActions.clear();
+ }
+ }
+
+ private final void executeOrPostRunnable(Runnable action) {
+ synchronized(mLock) {
+ if (mSessionCallback == null) {
+ // The session is not initialized yet.
+ mPendingActions.add(action);
+ } else {
+ if (mHandler.getLooper().isCurrentThread()) {
+ action.run();
+ } else {
+ // Posts the runnable if this is not called from the main thread
+ mHandler.post(action);
+ }
+ }
+ }
}
- private final void runOnMainThread(Runnable action) {
- if (mHandler.getLooper().isCurrentThread() && mSessionCallback != null) {
- action.run();
- } else {
- // Posts the runnable if this is not called from the main thread or the session
- // is not initialized yet.
- mHandler.post(action);
+ private final class OverlayViewCleanUpTask extends AsyncTask<View, Void, Void> {
+ @Override
+ protected Void doInBackground(View... views) {
+ View overlayViewParent = views[0];
+ try {
+ Thread.sleep(DETACH_OVERLAY_VIEW_TIMEOUT);
+ } catch (InterruptedException e) {
+ return null;
+ }
+ if (isCancelled()) {
+ return null;
+ }
+ if (overlayViewParent.isAttachedToWindow()) {
+ Log.e(TAG, "Time out on releasing overlay view. Killing "
+ + overlayViewParent.getContext().getPackageName());
+ Process.killProcess(Process.myPid());
+ }
+ return null;
}
}
}
/**
* Base class for a TV input session which represents an external device connected to a
- * hardware TV input. Once TV input returns an implementation of this class on
- * {@link #onCreateSession(String)}, the framework will create a hardware session and forward
- * the application's surface to the hardware TV input.
+ * hardware TV input.
+ * <p>
+ * This class is for an input which provides channels for the external set-top box to the
+ * application. Once a TV input returns an implementation of this class on
+ * {@link #onCreateSession(String)}, the framework will create a separate session for
+ * a hardware TV Input (e.g. HDMI 1) and forward the application's surface to the session so
+ * that the user can see the screen of the hardware TV Input when she tunes to a channel from
+ * this TV input. The implementation of this class is expected to change the channel of the
+ * external set-top box via a proprietary protocol when {@link HardwareSession#onTune(Uri)} is
+ * requested by the application.
+ * </p><p>
+ * Note that this class is not for inputs for internal hardware like built-in tuner and HDMI 1.
+ * </p>
* @see #onCreateSession(String)
*/
public abstract static class HardwareSession extends Session {
@@ -1106,17 +1197,20 @@ public abstract class TvInputService extends Service {
mHardwareSession = session;
SomeArgs args = SomeArgs.obtain();
if (session != null) {
- args.arg1 = mProxySession;
- args.arg2 = mProxySessionCallback;
- args.arg3 = session.getToken();
+ args.arg1 = HardwareSession.this;
+ args.arg2 = mProxySession;
+ args.arg3 = mProxySessionCallback;
+ args.arg4 = session.getToken();
} else {
args.arg1 = null;
- args.arg2 = mProxySessionCallback;
- args.arg3 = null;
+ args.arg2 = null;
+ args.arg3 = mProxySessionCallback;
+ args.arg4 = null;
onRelease();
}
mServiceHandler.obtainMessage(ServiceHandler.DO_NOTIFY_SESSION_CREATED, args)
.sendToTarget();
+ session.tune(TvContract.buildChannelUriForPassthroughInput(getHardwareInputId()));
}
@Override
@@ -1250,7 +1344,6 @@ public abstract class TvInputService extends Service {
}
return;
}
- sessionImpl.setSessionCallback(cb);
ITvInputSession stub = new ITvInputSessionWrapper(TvInputService.this,
sessionImpl, channel);
if (sessionImpl instanceof HardwareSession) {
@@ -1281,9 +1374,10 @@ public abstract class TvInputService extends Service {
proxySession.mHardwareSessionCallback, mServiceHandler);
} else {
SomeArgs someArgs = SomeArgs.obtain();
- someArgs.arg1 = stub;
- someArgs.arg2 = cb;
- someArgs.arg3 = null;
+ someArgs.arg1 = sessionImpl;
+ someArgs.arg2 = stub;
+ someArgs.arg3 = cb;
+ someArgs.arg4 = null;
mServiceHandler.obtainMessage(ServiceHandler.DO_NOTIFY_SESSION_CREATED,
someArgs).sendToTarget();
}
@@ -1291,14 +1385,18 @@ public abstract class TvInputService extends Service {
}
case DO_NOTIFY_SESSION_CREATED: {
SomeArgs args = (SomeArgs) msg.obj;
- ITvInputSession stub = (ITvInputSession) args.arg1;
- ITvInputSessionCallback cb = (ITvInputSessionCallback) args.arg2;
- IBinder hardwareSessionToken = (IBinder) args.arg3;
+ Session sessionImpl = (Session) args.arg1;
+ ITvInputSession stub = (ITvInputSession) args.arg2;
+ ITvInputSessionCallback cb = (ITvInputSessionCallback) args.arg3;
+ IBinder hardwareSessionToken = (IBinder) args.arg4;
try {
cb.onSessionCreated(stub, hardwareSessionToken);
} catch (RemoteException e) {
Log.e(TAG, "error in onSessionCreated");
}
+ if (sessionImpl != null) {
+ sessionImpl.initialize(cb);
+ }
args.recycle();
return;
}
diff --git a/media/java/android/media/tv/TvStreamConfig.java b/media/java/android/media/tv/TvStreamConfig.java
index a7e7e44..1bdc63e 100644
--- a/media/java/android/media/tv/TvStreamConfig.java
+++ b/media/java/android/media/tv/TvStreamConfig.java
@@ -33,7 +33,6 @@ public class TvStreamConfig implements Parcelable {
private int mStreamId;
private int mType;
- // TODO: Revisit if max widht/height really make sense.
private int mMaxWidth;
private int mMaxHeight;
/**
@@ -166,4 +165,17 @@ public class TvStreamConfig implements Parcelable {
return config;
}
}
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) return false;
+ if (!(obj instanceof TvStreamConfig)) return false;
+
+ TvStreamConfig config = (TvStreamConfig) obj;
+ return config.mGeneration == mGeneration
+ && config.mStreamId == mStreamId
+ && config.mType == mType
+ && config.mMaxWidth == mMaxWidth
+ && config.mMaxHeight == mMaxHeight;
+ }
}
diff --git a/media/java/android/media/tv/TvView.java b/media/java/android/media/tv/TvView.java
index 0949b1a..6fc1b82 100644
--- a/media/java/android/media/tv/TvView.java
+++ b/media/java/android/media/tv/TvView.java
@@ -18,7 +18,10 @@ package android.media.tv;
import android.annotation.SystemApi;
import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.PorterDuff;
import android.graphics.Rect;
+import android.graphics.Region;
import android.media.tv.TvInputManager.Session;
import android.media.tv.TvInputManager.Session.FinishedInputEventCallback;
import android.media.tv.TvInputManager.SessionCallback;
@@ -59,8 +62,6 @@ public class TvView extends ViewGroup {
private static final String TAG = "TvView";
private static final boolean DEBUG = false;
- private static final int VIDEO_SIZE_VALUE_UNKNOWN = 0;
-
private static final int ZORDER_MEDIA = 0;
private static final int ZORDER_MEDIA_OVERLAY = 1;
private static final int ZORDER_ON_TOP = 2;
@@ -69,7 +70,7 @@ public class TvView extends ViewGroup {
private static final int CAPTION_ENABLED = 1;
private static final int CAPTION_DISABLED = 2;
- private static final WeakReference<TvView> NULL_TV_VIEW = new WeakReference(null);
+ private static final WeakReference<TvView> NULL_TV_VIEW = new WeakReference<>(null);
private static final Object sMainTvViewLock = new Object();
private static WeakReference<TvView> sMainTvView = NULL_TV_VIEW;
@@ -86,8 +87,10 @@ public class TvView extends ViewGroup {
private OnUnhandledInputEventListener mOnUnhandledInputEventListener;
private boolean mHasStreamVolume;
private float mStreamVolume;
- private int mVideoWidth = VIDEO_SIZE_VALUE_UNKNOWN;
- private int mVideoHeight = VIDEO_SIZE_VALUE_UNKNOWN;
+ private int mCaptionEnabled;
+ private String mAppPrivateCommandAction;
+ private Bundle mAppPrivateCommandData;
+
private boolean mSurfaceChanged;
private int mSurfaceFormat;
private int mSurfaceWidth;
@@ -100,7 +103,6 @@ public class TvView extends ViewGroup {
private int mSurfaceViewRight;
private int mSurfaceViewTop;
private int mSurfaceViewBottom;
- private int mCaptionEnabled;
private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
@Override
@@ -197,7 +199,7 @@ public class TvView extends ViewGroup {
@SystemApi
public void setMain() {
synchronized (sMainTvViewLock) {
- sMainTvView = new WeakReference(this);
+ sMainTvView = new WeakReference<>(this);
if (hasWindowFocus() && mSession != null) {
mSession.setMain();
}
@@ -291,7 +293,7 @@ public class TvView extends ViewGroup {
}
synchronized (sMainTvViewLock) {
if (sMainTvView.get() == null) {
- sMainTvView = new WeakReference(this);
+ sMainTvView = new WeakReference<>(this);
}
}
if (mSessionCallback != null && mSessionCallback.mInputId.equals(inputId)) {
@@ -421,10 +423,10 @@ public class TvView extends ViewGroup {
* Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
* TvInputService.Session.appPrivateCommand()} on the current TvView.
*
- * @param action Name of the command to be performed. This <em>must</em> be a scoped name, i.e.
- * prefixed with a package name you own, so that different developers will not create
- * conflicting commands.
- * @param data Any data to include with the command.
+ * @param action The name of the private command to send. This <em>must</em> be a scoped name,
+ * i.e. prefixed with a package name you own, so that different developers will not
+ * create conflicting commands.
+ * @param data An optional bundle to send with the command.
* @hide
*/
@SystemApi
@@ -434,6 +436,13 @@ public class TvView extends ViewGroup {
}
if (mSession != null) {
mSession.sendAppPrivateCommand(action, data);
+ } else {
+ Log.w(TAG, "sendAppPrivateCommand - session not created (action " + action + " cached)");
+ if (mAppPrivateCommandAction != null) {
+ Log.w(TAG, "previous cached action " + action + " removed");
+ }
+ mAppPrivateCommandAction = action;
+ mAppPrivateCommandData = data;
}
}
@@ -587,6 +596,42 @@ public class TvView extends ViewGroup {
}
@Override
+ public boolean gatherTransparentRegion(Region region) {
+ if (mWindowZOrder != ZORDER_ON_TOP) {
+ if (region != null) {
+ int width = getWidth();
+ int height = getHeight();
+ if (width > 0 && height > 0) {
+ int location[] = new int[2];
+ getLocationInWindow(location);
+ int left = location[0];
+ int top = location[1];
+ region.op(left, top, left + width, top + height, Region.Op.UNION);
+ }
+ }
+ }
+ return super.gatherTransparentRegion(region);
+ }
+
+ @Override
+ public void draw(Canvas canvas) {
+ if (mWindowZOrder != ZORDER_ON_TOP) {
+ // Punch a hole so that the underlying overlay view and surface can be shown.
+ canvas.drawColor(0, PorterDuff.Mode.CLEAR);
+ }
+ super.draw(canvas);
+ }
+
+ @Override
+ protected void dispatchDraw(Canvas canvas) {
+ if (mWindowZOrder != ZORDER_ON_TOP) {
+ // Punch a hole so that the underlying overlay view and surface can be shown.
+ canvas.drawColor(0, PorterDuff.Mode.CLEAR);
+ }
+ super.dispatchDraw(canvas);
+ }
+
+ @Override
protected void onVisibilityChanged(View changedView, int visibility) {
super.onVisibilityChanged(changedView, visibility);
mSurfaceView.setVisibility(visibility);
@@ -619,6 +664,9 @@ public class TvView extends ViewGroup {
}
private void release() {
+ mAppPrivateCommandAction = null;
+ mAppPrivateCommandData = null;
+
setSessionSurface(null);
removeSessionOverlayView();
mUseRequestedSurfaceLayout = false;
@@ -703,19 +751,8 @@ public class TvView extends ViewGroup {
}
/**
- * This is invoked when the view is tuned to a specific channel and starts decoding video
- * stream from there. It is also called later when the video size is changed.
- *
- * @param inputId The ID of the TV input bound to this view.
- * @param width The width of the video.
- * @param height The height of the video.
- */
- public void onVideoSizeChanged(String inputId, int width, int height) {
- }
-
- /**
* This is invoked when the channel of this TvView is changed by the underlying TV input
- * with out any {@link TvView#tune(String, Uri)} request.
+ * without any {@link TvView#tune(String, Uri)} request.
*
* @param inputId The ID of the TV input bound to this view.
* @param channelUri The URI of a channel.
@@ -745,6 +782,18 @@ public class TvView extends ViewGroup {
}
/**
+ * This is invoked when the video size has been changed. It is also called when the first
+ * time video size information becomes available after this view is tuned to a specific
+ * channel.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param width The width of the video.
+ * @param height The height of the video.
+ */
+ public void onVideoSizeChanged(String inputId, int width, int height) {
+ }
+
+ /**
* This is called when the video is available, so the TV input starts the playback.
*
* @param inputId The ID of the TV input bound to this view.
@@ -828,16 +877,17 @@ public class TvView extends ViewGroup {
@Override
public void onSessionCreated(Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionCreated()");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionCreated - session already created");
// This callback is obsolete.
if (session != null) {
session.release();
}
return;
}
- if (DEBUG) {
- Log.d(TAG, "onSessionCreated()");
- }
mSession = session;
if (session != null) {
synchronized (sMainTvViewLock) {
@@ -862,6 +912,12 @@ public class TvView extends ViewGroup {
if (mHasStreamVolume) {
mSession.setStreamVolume(mStreamVolume);
}
+ if (mAppPrivateCommandAction != null) {
+ mSession.sendAppPrivateCommand(
+ mAppPrivateCommandAction, mAppPrivateCommandData);
+ mAppPrivateCommandAction = null;
+ mAppPrivateCommandData = null;
+ }
} else {
mSessionCallback = null;
if (mCallback != null) {
@@ -872,7 +928,11 @@ public class TvView extends ViewGroup {
@Override
public void onSessionReleased(Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionReleased()");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionReleased - session not created");
return;
}
mOverlayViewCreated = false;
@@ -886,12 +946,13 @@ public class TvView extends ViewGroup {
@Override
public void onChannelRetuned(Session session, Uri channelUri) {
- if (this != mSessionCallback) {
- return;
- }
if (DEBUG) {
Log.d(TAG, "onChannelChangedByTvInput(" + channelUri + ")");
}
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onChannelRetuned - session not created");
+ return;
+ }
if (mCallback != null) {
mCallback.onChannelRetuned(mInputId, channelUri);
}
@@ -899,12 +960,13 @@ public class TvView extends ViewGroup {
@Override
public void onTracksChanged(Session session, List<TvTrackInfo> tracks) {
+ if (DEBUG) {
+ Log.d(TAG, "onTracksChanged(" + tracks + ")");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onTracksChanged - session not created");
return;
}
- if (DEBUG) {
- Log.d(TAG, "onTracksChanged()");
- }
if (mCallback != null) {
mCallback.onTracksChanged(mInputId, tracks);
}
@@ -912,26 +974,41 @@ public class TvView extends ViewGroup {
@Override
public void onTrackSelected(Session session, int type, String trackId) {
+ if (DEBUG) {
+ Log.d(TAG, "onTrackSelected(type=" + type + ", trackId=" + trackId + ")");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onTrackSelected - session not created");
return;
}
- if (DEBUG) {
- Log.d(TAG, "onTrackSelected()");
- }
- // TODO: Update the video size when the type is TYPE_VIDEO.
if (mCallback != null) {
mCallback.onTrackSelected(mInputId, type, trackId);
}
}
@Override
- public void onVideoAvailable(Session session) {
+ public void onVideoSizeChanged(Session session, int width, int height) {
+ if (DEBUG) {
+ Log.d(TAG, "onVideoSizeChanged()");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onVideoSizeChanged - session not created");
return;
}
+ if (mCallback != null) {
+ mCallback.onVideoSizeChanged(mInputId, width, height);
+ }
+ }
+
+ @Override
+ public void onVideoAvailable(Session session) {
if (DEBUG) {
Log.d(TAG, "onVideoAvailable()");
}
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onVideoAvailable - session not created");
+ return;
+ }
if (mCallback != null) {
mCallback.onVideoAvailable(mInputId);
}
@@ -939,12 +1016,13 @@ public class TvView extends ViewGroup {
@Override
public void onVideoUnavailable(Session session, int reason) {
+ if (DEBUG) {
+ Log.d(TAG, "onVideoUnavailable(reason=" + reason + ")");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onVideoUnavailable - session not created");
return;
}
- if (DEBUG) {
- Log.d(TAG, "onVideoUnavailable(" + reason + ")");
- }
if (mCallback != null) {
mCallback.onVideoUnavailable(mInputId, reason);
}
@@ -952,12 +1030,13 @@ public class TvView extends ViewGroup {
@Override
public void onContentAllowed(Session session) {
- if (this != mSessionCallback) {
- return;
- }
if (DEBUG) {
Log.d(TAG, "onContentAllowed()");
}
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onContentAllowed - session not created");
+ return;
+ }
if (mCallback != null) {
mCallback.onContentAllowed(mInputId);
}
@@ -965,12 +1044,13 @@ public class TvView extends ViewGroup {
@Override
public void onContentBlocked(Session session, TvContentRating rating) {
+ if (DEBUG) {
+ Log.d(TAG, "onContentBlocked(rating=" + rating + ")");
+ }
if (this != mSessionCallback) {
+ Log.w(TAG, "onContentBlocked - session not created");
return;
}
- if (DEBUG) {
- Log.d(TAG, "onContentBlocked()");
- }
if (mCallback != null) {
mCallback.onContentBlocked(mInputId, rating);
}
@@ -978,13 +1058,14 @@ public class TvView extends ViewGroup {
@Override
public void onLayoutSurface(Session session, int left, int top, int right, int bottom) {
- if (this != mSessionCallback) {
- return;
- }
if (DEBUG) {
Log.d(TAG, "onLayoutSurface (left=" + left + ", top=" + top + ", right="
+ right + ", bottom=" + bottom + ",)");
}
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onLayoutSurface - session not created");
+ return;
+ }
mSurfaceViewLeft = left;
mSurfaceViewTop = top;
mSurfaceViewRight = right;
@@ -995,12 +1076,13 @@ public class TvView extends ViewGroup {
@Override
public void onSessionEvent(Session session, String eventType, Bundle eventArgs) {
- if (this != mSessionCallback) {
- return;
- }
if (DEBUG) {
Log.d(TAG, "onSessionEvent(" + eventType + ")");
}
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionEvent - session not created");
+ return;
+ }
if (mCallback != null) {
mCallback.onEvent(mInputId, eventType, eventArgs);
}
diff --git a/media/java/android/service/media/MediaBrowserService.java b/media/java/android/service/media/MediaBrowserService.java
index d50be42..26aedbd 100644
--- a/media/java/android/service/media/MediaBrowserService.java
+++ b/media/java/android/service/media/MediaBrowserService.java
@@ -101,7 +101,6 @@ public abstract class MediaBrowserService extends Service {
* be thrown.
*
* @see MediaBrowserService#onLoadChildren
- * @see MediaBrowserService#onLoadIcon
*/
public class Result<T> {
private Object mDebug;
@@ -190,8 +189,10 @@ public abstract class MediaBrowserService extends Service {
} else {
try {
mConnections.put(b, connection);
- callbacks.onConnect(connection.root.getRootId(),
- mSession, connection.root.getExtras());
+ if (mSession != null) {
+ callbacks.onConnect(connection.root.getRootId(),
+ mSession, connection.root.getExtras());
+ }
} catch (RemoteException ex) {
Log.w(TAG, "Calling onConnect() failed. Dropping client. "
+ "pkg=" + pkg);
@@ -320,16 +321,32 @@ public abstract class MediaBrowserService extends Service {
/**
* Call to set the media session.
* <p>
- * This must be called before onCreate returns.
- *
- * @return The media session token, must not be null.
+ * This should be called as soon as possible during the service's startup.
+ * It may only be called once.
*/
- public void setSessionToken(MediaSession.Token token) {
+ public void setSessionToken(final MediaSession.Token token) {
if (token == null) {
- throw new IllegalStateException(this.getClass().getName()
- + ".onCreateSession() set invalid MediaSession.Token");
+ throw new IllegalArgumentException("Session token may not be null.");
+ }
+ if (mSession != null) {
+ throw new IllegalStateException("The session token has already been set.");
}
mSession = token;
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ for (IBinder key : mConnections.keySet()) {
+ ConnectionRecord connection = mConnections.get(key);
+ try {
+ connection.callbacks.onConnect(connection.root.getRootId(), token,
+ connection.root.getExtras());
+ } catch (RemoteException e) {
+ Log.w(TAG, "Connection for " + connection.pkg + " is no longer valid.");
+ mConnections.remove(key);
+ }
+ }
+ }
+ });
}
/**
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index 45377ad..7e68c78 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -80,7 +80,7 @@ public:
virtual ~JNIImageReaderContext();
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
CpuConsumer::LockedBuffer* getLockedBuffer();
@@ -187,7 +187,7 @@ JNIImageReaderContext::~JNIImageReaderContext() {
mConsumer.clear();
}
-void JNIImageReaderContext::onFrameAvailable()
+void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
{
ALOGV("%s: frame available", __FUNCTION__);
bool needsDetach = false;
@@ -614,6 +614,24 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
return rowStride;
}
+static int Image_getBufferWidth(CpuConsumer::LockedBuffer* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->crop.isEmpty()) {
+ return buffer->crop.getWidth();
+ }
+ return buffer->width;
+}
+
+static int Image_getBufferHeight(CpuConsumer::LockedBuffer* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->crop.isEmpty()) {
+ return buffer->crop.getHeight();
+ }
+ return buffer->height;
+}
+
// ----------------------------------------------------------------------------
static void ImageReader_classInit(JNIEnv* env, jclass clazz)
@@ -794,33 +812,16 @@ static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
}
// Check if the producer buffer configurations match what ImageReader configured.
- // We want to fail for the very first image because this case is too bad.
- int outputWidth = buffer->width;
- int outputHeight = buffer->height;
-
- // Correct width/height when crop is set.
- if (!buffer->crop.isEmpty()) {
- outputWidth = buffer->crop.getWidth();
- outputHeight = buffer->crop.getHeight();
- }
+ int outputWidth = Image_getBufferWidth(buffer);
+ int outputHeight = Image_getBufferHeight(buffer);
int imgReaderFmt = ctx->getBufferFormat();
int imageReaderWidth = ctx->getBufferWidth();
int imageReaderHeight = ctx->getBufferHeight();
if ((buffer->format != HAL_PIXEL_FORMAT_BLOB) && (imgReaderFmt != HAL_PIXEL_FORMAT_BLOB) &&
- (imageReaderWidth != outputWidth || imageReaderHeight > outputHeight)) {
- /**
- * For video decoder, the buffer height is actually the vertical stride,
- * which is always >= actual image height. For future, decoder need provide
- * right crop rectangle to CpuConsumer to indicate the actual image height,
- * see bug 9563986. After this bug is fixed, we can enforce the height equal
- * check. Right now, only make sure buffer height is no less than ImageReader
- * height.
- */
- jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
- "Producer buffer size: %dx%d, doesn't match ImageReader configured size: %dx%d",
- outputWidth, outputHeight, imageReaderWidth, imageReaderHeight);
- return -1;
+ (imageReaderWidth != outputWidth || imageReaderHeight != outputHeight)) {
+ ALOGV("%s: Producer buffer size: %dx%d, doesn't match ImageReader configured size: %dx%d",
+ __FUNCTION__, outputWidth, outputHeight, imageReaderWidth, imageReaderHeight);
}
int bufFmt = buffer->format;
@@ -933,6 +934,19 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
return byteBuffer;
}
+static jint Image_getWidth(JNIEnv* env, jobject thiz)
+{
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferWidth(buffer);
+}
+
+static jint Image_getHeight(JNIEnv* env, jobject thiz)
+{
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferHeight(buffer);
+}
+
+
} // extern "C"
// ----------------------------------------------------------------------------
@@ -942,14 +956,16 @@ static JNINativeMethod gImageReaderMethods[] = {
{"nativeInit", "(Ljava/lang/Object;IIII)V", (void*)ImageReader_init },
{"nativeClose", "()V", (void*)ImageReader_close },
{"nativeReleaseImage", "(Landroid/media/Image;)V", (void*)ImageReader_imageRelease },
- {"nativeImageSetup", "(Landroid/media/Image;)I", (void*)ImageReader_imageSetup },
+ {"nativeImageSetup", "(Landroid/media/Image;)I", (void*)ImageReader_imageSetup },
{"nativeGetSurface", "()Landroid/view/Surface;", (void*)ImageReader_getSurface },
};
static JNINativeMethod gImageMethods[] = {
{"nativeImageGetBuffer", "(II)Ljava/nio/ByteBuffer;", (void*)Image_getByteBuffer },
{"nativeCreatePlane", "(II)Landroid/media/ImageReader$SurfaceImage$SurfacePlane;",
- (void*)Image_createSurfacePlane },
+ (void*)Image_createSurfacePlane },
+ {"nativeGetWidth", "()I", (void*)Image_getWidth },
+ {"nativeGetHeight", "()I", (void*)Image_getHeight },
};
int register_android_media_ImageReader(JNIEnv *env) {
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index 91eb499..d9de7a9 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -1003,6 +1003,27 @@ static jobject android_media_MediaDrm_getSecureStops(
return ListOfVectorsToArrayListOfByteArray(env, secureStops);
}
+static jbyteArray android_media_MediaDrm_getSecureStop(
+ JNIEnv *env, jobject thiz, jbyteArray ssid) {
+ sp<IDrm> drm = GetDrm(env, thiz);
+
+ if (drm == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "MediaDrm obj is null");
+ return NULL;
+ }
+
+ Vector<uint8_t> secureStop;
+
+ status_t err = drm->getSecureStop(JByteArrayToVector(env, ssid), secureStop);
+
+ if (throwExceptionAsNecessary(env, err, "Failed to get secure stop")) {
+ return NULL;
+ }
+
+ return VectorToJByteArray(env, secureStop);
+}
+
static void android_media_MediaDrm_releaseSecureStops(
JNIEnv *env, jobject thiz, jbyteArray jssRelease) {
sp<IDrm> drm = GetDrm(env, thiz);
@@ -1020,6 +1041,21 @@ static void android_media_MediaDrm_releaseSecureStops(
throwExceptionAsNecessary(env, err, "Failed to release secure stops");
}
+static void android_media_MediaDrm_releaseAllSecureStops(
+ JNIEnv *env, jobject thiz) {
+ sp<IDrm> drm = GetDrm(env, thiz);
+
+ if (drm == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "MediaDrm obj is null");
+ return;
+ }
+
+ status_t err = drm->releaseAllSecureStops();
+
+ throwExceptionAsNecessary(env, err, "Failed to release all secure stops");
+}
+
static jstring android_media_MediaDrm_getPropertyString(
JNIEnv *env, jobject thiz, jstring jname) {
sp<IDrm> drm = GetDrm(env, thiz);
@@ -1384,9 +1420,15 @@ static JNINativeMethod gMethods[] = {
{ "getSecureStops", "()Ljava/util/List;",
(void *)android_media_MediaDrm_getSecureStops },
+ { "getSecureStop", "([B)[B",
+ (void *)android_media_MediaDrm_getSecureStop },
+
{ "releaseSecureStops", "([B)V",
(void *)android_media_MediaDrm_releaseSecureStops },
+ { "releaseAllSecureStops", "()V",
+ (void *)android_media_MediaDrm_releaseAllSecureStops },
+
{ "getPropertyString", "(Ljava/lang/String;)Ljava/lang/String;",
(void *)android_media_MediaDrm_getPropertyString },
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index fcc3438..8b7d40d 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -182,7 +182,8 @@ static void
android_media_MediaRecorder_setAudioSource(JNIEnv *env, jobject thiz, jint as)
{
ALOGV("setAudioSource(%d)", as);
- if (as < AUDIO_SOURCE_DEFAULT || as >= AUDIO_SOURCE_CNT) {
+ if (as < AUDIO_SOURCE_DEFAULT ||
+ (as >= AUDIO_SOURCE_CNT && as != AUDIO_SOURCE_FM_TUNER)) {
jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid audio source");
return;
}
diff --git a/media/jni/android_mtp_MtpDatabase.cpp b/media/jni/android_mtp_MtpDatabase.cpp
index f381488..803ab05 100644
--- a/media/jni/android_mtp_MtpDatabase.cpp
+++ b/media/jni/android_mtp_MtpDatabase.cpp
@@ -207,7 +207,8 @@ MyMtpDatabase::MyMtpDatabase(JNIEnv *env, jobject client)
return; // Already threw.
}
mLongBuffer = (jlongArray)env->NewGlobalRef(longArray);
- jcharArray charArray = env->NewCharArray(256);
+ // Needs to be long enough to hold a file path for getObjectFilePath()
+ jcharArray charArray = env->NewCharArray(PATH_MAX + 1);
if (!charArray) {
return; // Already threw.
}
@@ -468,6 +469,63 @@ out:
return result;
}
+static bool readLongValue(int type, MtpDataPacket& packet, jlong& longValue) {
+ switch (type) {
+ case MTP_TYPE_INT8: {
+ int8_t temp;
+ if (!packet.getInt8(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_UINT8: {
+ uint8_t temp;
+ if (!packet.getUInt8(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_INT16: {
+ int16_t temp;
+ if (!packet.getInt16(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_UINT16: {
+ uint16_t temp;
+ if (!packet.getUInt16(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_INT32: {
+ int32_t temp;
+ if (!packet.getInt32(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_UINT32: {
+ uint32_t temp;
+ if (!packet.getUInt32(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_INT64: {
+ int64_t temp;
+ if (!packet.getInt64(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ case MTP_TYPE_UINT64: {
+ uint64_t temp;
+ if (!packet.getUInt64(temp)) return false;
+ longValue = temp;
+ break;
+ }
+ default:
+ ALOGE("unsupported type in readLongValue");
+ return false;
+ }
+ return true;
+}
+
MtpResponseCode MyMtpDatabase::setObjectPropertyValue(MtpObjectHandle handle,
MtpObjectProperty property,
MtpDataPacket& packet) {
@@ -479,49 +537,22 @@ MtpResponseCode MyMtpDatabase::setObjectPropertyValue(MtpObjectHandle handle,
JNIEnv* env = AndroidRuntime::getJNIEnv();
jlong longValue = 0;
jstring stringValue = NULL;
+ MtpResponseCode result = MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT;
- switch (type) {
- case MTP_TYPE_INT8:
- longValue = packet.getInt8();
- break;
- case MTP_TYPE_UINT8:
- longValue = packet.getUInt8();
- break;
- case MTP_TYPE_INT16:
- longValue = packet.getInt16();
- break;
- case MTP_TYPE_UINT16:
- longValue = packet.getUInt16();
- break;
- case MTP_TYPE_INT32:
- longValue = packet.getInt32();
- break;
- case MTP_TYPE_UINT32:
- longValue = packet.getUInt32();
- break;
- case MTP_TYPE_INT64:
- longValue = packet.getInt64();
- break;
- case MTP_TYPE_UINT64:
- longValue = packet.getUInt64();
- break;
- case MTP_TYPE_STR:
- {
- MtpStringBuffer buffer;
- packet.getString(buffer);
- stringValue = env->NewStringUTF((const char *)buffer);
- break;
- }
- default:
- ALOGE("unsupported type in setObjectPropertyValue\n");
- return MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT;
+ if (type == MTP_TYPE_STR) {
+ MtpStringBuffer buffer;
+ if (!packet.getString(buffer)) goto fail;
+ stringValue = env->NewStringUTF((const char *)buffer);
+ } else {
+ if (!readLongValue(type, packet, longValue)) goto fail;
}
- jint result = env->CallIntMethod(mDatabase, method_setObjectProperty,
+ result = env->CallIntMethod(mDatabase, method_setObjectProperty,
(jint)handle, (jint)property, longValue, stringValue);
if (stringValue)
env->DeleteLocalRef(stringValue);
+fail:
checkAndClearExceptionFromCallback(env, __FUNCTION__);
return result;
}
@@ -609,49 +640,22 @@ MtpResponseCode MyMtpDatabase::setDevicePropertyValue(MtpDeviceProperty property
JNIEnv* env = AndroidRuntime::getJNIEnv();
jlong longValue = 0;
jstring stringValue = NULL;
+ MtpResponseCode result = MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT;
- switch (type) {
- case MTP_TYPE_INT8:
- longValue = packet.getInt8();
- break;
- case MTP_TYPE_UINT8:
- longValue = packet.getUInt8();
- break;
- case MTP_TYPE_INT16:
- longValue = packet.getInt16();
- break;
- case MTP_TYPE_UINT16:
- longValue = packet.getUInt16();
- break;
- case MTP_TYPE_INT32:
- longValue = packet.getInt32();
- break;
- case MTP_TYPE_UINT32:
- longValue = packet.getUInt32();
- break;
- case MTP_TYPE_INT64:
- longValue = packet.getInt64();
- break;
- case MTP_TYPE_UINT64:
- longValue = packet.getUInt64();
- break;
- case MTP_TYPE_STR:
- {
- MtpStringBuffer buffer;
- packet.getString(buffer);
- stringValue = env->NewStringUTF((const char *)buffer);
- break;
- }
- default:
- ALOGE("unsupported type in setDevicePropertyValue\n");
- return MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT;
+ if (type == MTP_TYPE_STR) {
+ MtpStringBuffer buffer;
+ if (!packet.getString(buffer)) goto fail;
+ stringValue = env->NewStringUTF((const char *)buffer);
+ } else {
+ if (!readLongValue(type, packet, longValue)) goto fail;
}
- jint result = env->CallIntMethod(mDatabase, method_setDeviceProperty,
+ result = env->CallIntMethod(mDatabase, method_setDeviceProperty,
(jint)property, longValue, stringValue);
if (stringValue)
env->DeleteLocalRef(stringValue);
+fail:
checkAndClearExceptionFromCallback(env, __FUNCTION__);
return result;
}
diff --git a/media/jni/audioeffect/android_media_AudioEffect.cpp b/media/jni/audioeffect/android_media_AudioEffect.cpp
index c9cefbd..c364d46 100644
--- a/media/jni/audioeffect/android_media_AudioEffect.cpp
+++ b/media/jni/audioeffect/android_media_AudioEffect.cpp
@@ -803,28 +803,12 @@ static jobjectArray
android_media_AudioEffect_native_queryPreProcessings(JNIEnv *env, jclass clazz __unused,
jint audioSession)
{
- // kDefaultNumEffects is a "reasonable" value ensuring that only one query will be enough on
- // most devices to get all active audio pre processing on a given session.
- static const uint32_t kDefaultNumEffects = 5;
-
- effect_descriptor_t *descriptors = new effect_descriptor_t[kDefaultNumEffects];
- uint32_t numEffects = kDefaultNumEffects;
+ effect_descriptor_t *descriptors = new effect_descriptor_t[AudioEffect::kMaxPreProcessing];
+ uint32_t numEffects = AudioEffect::kMaxPreProcessing;
status_t status = AudioEffect::queryDefaultPreProcessing(audioSession,
descriptors,
&numEffects);
- if ((status != NO_ERROR && status != NO_MEMORY) ||
- numEffects == 0) {
- delete[] descriptors;
- return NULL;
- }
- if (status == NO_MEMORY) {
- delete [] descriptors;
- descriptors = new effect_descriptor_t[numEffects];
- status = AudioEffect::queryDefaultPreProcessing(audioSession,
- descriptors,
- &numEffects);
- }
if (status != NO_ERROR || numEffects == 0) {
delete[] descriptors;
return NULL;
diff --git a/media/tests/omxjpegdecoder/Android.mk b/media/tests/omxjpegdecoder/Android.mk
deleted file mode 100644
index 70fdd05..0000000
--- a/media/tests/omxjpegdecoder/Android.mk
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
- omx_jpeg_decoder.cpp \
- jpeg_decoder_bench.cpp \
- StreamSource.cpp
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libskia \
- libstagefright \
- libstagefright_foundation \
- libbinder \
- libutils \
- liblog \
- libjpeg
-
-LOCAL_C_INCLUDES := \
- $(TOP)/external/jpeg \
- $(TOP)/frameworks/base/media/libstagefright \
- $(TOP)/frameworks/base/include/ \
- $(TOP)/frameworks/base/ \
- $(TOP)/frameworks/native/include/media/openmax
-
-LOCAL_MODULE := jpeg_bench
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_CFLAGS += -Wall -Werror -Wunused -Wunreachable-code
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/tests/omxjpegdecoder/StreamSource.cpp b/media/tests/omxjpegdecoder/StreamSource.cpp
deleted file mode 100644
index f764121a..0000000
--- a/media/tests/omxjpegdecoder/StreamSource.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/ADebug.h>
-
-#include "StreamSource.h"
-
-namespace android {
-
-StreamSource::StreamSource(SkStream *stream)
- : mStream(stream) {
- CHECK(stream != NULL);
- mSize = stream->getLength();
-}
-
-StreamSource::~StreamSource() {
- delete mStream;
- mStream = NULL;
-}
-
-status_t StreamSource::initCheck() const {
- return mStream != NULL ? OK : NO_INIT;
-}
-
-ssize_t StreamSource::readAt(off64_t offset, void *data, size_t size) {
- Mutex::Autolock autoLock(mLock);
-
- mStream->rewind();
- mStream->skip(offset);
- ssize_t result = mStream->read(data, size);
-
- return result;
-}
-
-status_t StreamSource::getSize(off64_t *size) {
- *size = mSize;
- return OK;
-}
-
-} // namespace android
diff --git a/media/tests/omxjpegdecoder/StreamSource.h b/media/tests/omxjpegdecoder/StreamSource.h
deleted file mode 100644
index 6e4adfb..0000000
--- a/media/tests/omxjpegdecoder/StreamSource.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STREAM_SOURCE_H_
-
-#define STREAM_SOURCE_H_
-
-#include <stdio.h>
-
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wunused-parameter"
-#include <SkStream.h>
-#pragma GCC diagnostic pop
-
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaErrors.h>
-#include <utils/threads.h>
-
-namespace android {
-
-class StreamSource : public DataSource {
-public:
- // Pass the ownership of SkStream to StreamSource.
- StreamSource(SkStream *SkStream);
- virtual status_t initCheck() const;
- virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off64_t *size);
-
-protected:
- virtual ~StreamSource();
-
-private:
- SkStream *mStream;
- size_t mSize;
- Mutex mLock;
-
- StreamSource(const StreamSource &);
- StreamSource &operator=(const StreamSource &);
-};
-
-} // namespace android
-
-#endif // STREAM_SOURCE_H_
diff --git a/media/tests/omxjpegdecoder/jpeg_decoder_bench.cpp b/media/tests/omxjpegdecoder/jpeg_decoder_bench.cpp
deleted file mode 100644
index 83474d7..0000000
--- a/media/tests/omxjpegdecoder/jpeg_decoder_bench.cpp
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "OmxJpegDecoder"
-#include <sys/time.h>
-#include <utils/Log.h>
-
-#include <binder/ProcessState.h>
-
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wunused-parameter"
-#include "SkBitmap.h"
-#include "SkImageDecoder.h"
-#include "SkStream.h"
-#pragma GCC diagnostic pop
-
-#include "omx_jpeg_decoder.h"
-
-class SkJPEGImageDecoder : public SkImageDecoder {
-public:
- virtual Format getFormat() const {
- return kJPEG_Format;
- }
-
-protected:
- virtual bool onDecode(SkStream* stream, SkBitmap* bm, Mode);
-};
-
-int nullObjectReturn(const char msg[]) {
- if (msg) {
- SkDebugf("--- %s\n", msg);
- }
- return -1;
-}
-
-static int64_t getNowUs() {
- struct timeval tv;
- gettimeofday(&tv, NULL);
-
- return tv.tv_usec + (int64_t) tv.tv_sec * 1000000;
-}
-
-int testDecodeBounds(SkImageDecoder* decoder, SkStream* stream,
- SkBitmap* bitmap) {
- int64_t startTime = getNowUs();
- SkColorType prefColorType = kN32_SkColorType;
- SkImageDecoder::Mode decodeMode = SkImageDecoder::kDecodeBounds_Mode;
-
- // Decode the input stream and then use the bitmap.
- if (!decoder->decode(stream, bitmap, prefColorType, decodeMode)) {
- return nullObjectReturn("decoder->decode returned false");
- } else {
- int64_t delay = getNowUs() - startTime;
- printf("WidthxHeight: %dx%d\n", bitmap->width(), bitmap->height());
- printf("Decoding Time in BoundsMode %.1f msec.\n", delay / 1000.0f);
- return 0;
- }
-}
-
-int testDecodePixels(SkImageDecoder* decoder, SkStream* stream,
- SkBitmap* bitmap) {
- int64_t startTime = getNowUs();
- SkColorType prefColorType = kN32_SkColorType;
- SkImageDecoder::Mode decodeMode = SkImageDecoder::kDecodePixels_Mode;
-
- // Decode the input stream and then use the bitmap.
- if (!decoder->decode(stream, bitmap, prefColorType, decodeMode)) {
- return nullObjectReturn("decoder->decode returned false");
- } else {
- int64_t delay = getNowUs() - startTime;
- printf("Decoding Time in PixelsMode %.1f msec.\n", delay / 1000.0f);
- const char* filename = "/sdcard/omxJpegDecodedBitmap.rgba";
- return storeBitmapToFile(bitmap, filename);
- }
-}
-
-int testDecoder(SkImageDecoder* decoder, char* filename) {
- // test DecodeMode == Pixels
- SkStream* stream = new SkFILEStream(filename);
- SkBitmap* bitmap = new SkBitmap;
- testDecodePixels(decoder, stream, bitmap);
- delete bitmap;
-
- // test DecodeMode == Bounds
- stream = new SkFILEStream(filename);
- bitmap = new SkBitmap;
- testDecodeBounds(decoder, stream, bitmap);
- delete bitmap;
-
- delete decoder;
- return 0;
-}
-
-int main(int argc, char** argv) {
- if (argc < 2) {
- printf("Need a parameter!\n");
- return 1;
- }
-
- android::ProcessState::self()->startThreadPool();
-
- printf("Decoding jpeg with libjpeg...\n");
- SkJPEGImageDecoder* libjpeg = new SkJPEGImageDecoder;
- testDecoder(libjpeg, argv[1]);
-
- printf("\nDecoding jpeg with OMX...\n");
- OmxJpegImageDecoder* omx = new OmxJpegImageDecoder;
- testDecoder(omx, argv[1]);
- return 0;
-}
diff --git a/media/tests/omxjpegdecoder/omx_jpeg_decoder.cpp b/media/tests/omxjpegdecoder/omx_jpeg_decoder.cpp
deleted file mode 100644
index c54490e..0000000
--- a/media/tests/omxjpegdecoder/omx_jpeg_decoder.cpp
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "OmxJpegDecoder"
-#include <sys/time.h>
-#include <utils/Log.h>
-
-#include <stdlib.h>
-#include <string.h>
-#include <unistd.h>
-
-#include <binder/IServiceManager.h>
-#include <binder/ProcessState.h>
-#include <media/IMediaPlayerService.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/OMXClient.h>
-#include <media/stagefright/OMXCodec.h>
-
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wunused-parameter"
-#include <SkImage.h>
-#include <SkMallocPixelRef.h>
-#pragma GCC diagnostic pop
-
-#include "omx_jpeg_decoder.h"
-#include "StreamSource.h"
-
-using namespace android;
-
-extern int storeBitmapToFile(SkBitmap* bitmap, const char* filename) {
- bitmap->lockPixels();
- uint8_t* data = (uint8_t *)bitmap->getPixels();
- int size = bitmap->getSize();
- FILE* fp = fopen(filename, "w+");
-
- if (NULL == fp) {
- printf("Cannot open the output file! \n");
- return -1;
- } else {
- while (size > 0) {
- int numChars = fwrite(data, sizeof(char), 1024, fp);
- int numBytes = numChars * sizeof(char);
- size -= numBytes;
- data += numBytes;
- }
- fclose(fp);
- }
- return 0;
-}
-
-static int64_t getNowUs() {
- struct timeval tv;
- gettimeofday(&tv, NULL);
-
- return (int64_t)tv.tv_usec + tv.tv_sec * 1000000;
-}
-
-OmxJpegImageDecoder::OmxJpegImageDecoder() {
- status_t err = mClient.connect();
- CHECK_EQ(err, (status_t)OK);
-}
-
-OmxJpegImageDecoder::~OmxJpegImageDecoder() {
- mClient.disconnect();
-}
-
-bool OmxJpegImageDecoder::onDecode(SkStream* stream,
- SkBitmap* bm, Mode mode) {
- sp<MediaSource> source = prepareMediaSource(stream);
- sp<MetaData> meta = source->getFormat();
- int width;
- int height;
- meta->findInt32(kKeyWidth, &width);
- meta->findInt32(kKeyHeight, &height);
- configBitmapSize(
- bm, getPrefColorType(k32Bit_SrcDepth, false),
- width, height);
-
- // mode == DecodeBounds
- if (mode == SkImageDecoder::kDecodeBounds_Mode) {
- return true;
- }
-
- // mode == DecodePixels
- if (!this->allocPixelRef(bm, NULL)) {
- ALOGI("Cannot allocPixelRef()!");
- return false;
- }
-
- sp<MediaSource> decoder = getDecoder(&mClient, source);
- return decodeSource(decoder, source, bm);
-}
-
-JPEGSource* OmxJpegImageDecoder::prepareMediaSource(SkStream* stream) {
- DataSource::RegisterDefaultSniffers();
- sp<DataSource> dataSource = new StreamSource(stream);
- return new JPEGSource(dataSource);
-}
-
-sp<MediaSource> OmxJpegImageDecoder::getDecoder(
- OMXClient *client, const sp<MediaSource>& source) {
- sp<MetaData> meta = source->getFormat();
- sp<MediaSource> decoder = OMXCodec::Create(
- client->interface(), meta, false /* createEncoder */, source);
-
- CHECK(decoder != NULL);
- return decoder;
-}
-
-bool OmxJpegImageDecoder::decodeSource(sp<MediaSource> decoder,
- const sp<MediaSource>& /* source */, SkBitmap* bm) {
- status_t rt = decoder->start();
- if (rt != OK) {
- ALOGE("Cannot start OMX Decoder!");
- return false;
- }
- int64_t startTime = getNowUs();
- MediaBuffer *buffer;
-
- // decode source
- status_t err = decoder->read(&buffer, NULL);
- int64_t duration = getNowUs() - startTime;
-
- if (err != OK) {
- CHECK(buffer == NULL);
- }
- printf("Duration in decoder->read(): %.1f (msecs). \n",
- duration / 1E3 );
-
- // Copy pixels from buffer to bm.
- // May need to check buffer->rawBytes() == bm->rawBytes().
- CHECK_EQ(buffer->size(), bm->getSize());
- memcpy(bm->getPixels(), buffer->data(), buffer->size());
- buffer->release();
- decoder->stop();
-
- return true;
-}
-
-void OmxJpegImageDecoder::configBitmapSize(SkBitmap* bm, SkColorType /* pref */,
- int width, int height) {
- // Set the color space to ARGB_8888 for now (ignoring pref)
- // because of limitation in hardware support.
- bm->setInfo(SkImageInfo::MakeN32(width, height, kOpaque_SkAlphaType));
-}
diff --git a/media/tests/omxjpegdecoder/omx_jpeg_decoder.h b/media/tests/omxjpegdecoder/omx_jpeg_decoder.h
deleted file mode 100644
index 284410c..0000000
--- a/media/tests/omxjpegdecoder/omx_jpeg_decoder.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMXJPEGIMAGEDECODER
-#define OMXJPEGIMAGEDECODER
-
-#include <stdlib.h>
-#include <string.h>
-#include <unistd.h>
-
-#include <media/stagefright/JPEGSource.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/OMXClient.h>
-#include <media/stagefright/OMXCodec.h>
-
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wunused-parameter"
-#include <SkImageDecoder.h>
-#include <SkStream.h>
-#pragma GCC diagnostic pop
-
-using namespace android;
-
-extern int storeBitmapToFile(SkBitmap* bitmap, const char* filename);
-
-class OmxJpegImageDecoder : public SkImageDecoder {
-public:
- OmxJpegImageDecoder();
- ~OmxJpegImageDecoder();
-
- virtual Format getFormat() const {
- return kJPEG_Format;
- }
-
-protected:
- virtual bool onDecode(SkStream* stream, SkBitmap* bm, Mode mode);
-
-private:
- JPEGSource* prepareMediaSource(SkStream* stream);
- sp<MediaSource> getDecoder(OMXClient* client, const sp<MediaSource>& source);
- bool decodeSource(sp<MediaSource> decoder, const sp<MediaSource>& source,
- SkBitmap* bm);
- void configBitmapSize(SkBitmap* bm, SkColorType, int width, int height);
-
- OMXClient mClient;
-};
-
-#endif