summaryrefslogtreecommitdiffstats
path: root/media/java
diff options
context:
space:
mode:
authorJean-Baptiste Queru <jbq@google.com>2009-05-20 11:28:04 -0700
committerJean-Baptiste Queru <jbq@google.com>2009-05-20 11:28:04 -0700
commit843ef36f7b96cc19ea7d2996b7c8661b41ec3452 (patch)
tree560e1648c99a93986f8b7deef851ef8bb8029db7 /media/java
parent358d23017d0d6c4636eb7599ae7a9b48108899a3 (diff)
downloadframeworks_base-843ef36f7b96cc19ea7d2996b7c8661b41ec3452.zip
frameworks_base-843ef36f7b96cc19ea7d2996b7c8661b41ec3452.tar.gz
frameworks_base-843ef36f7b96cc19ea7d2996b7c8661b41ec3452.tar.bz2
donut snapshot
Diffstat (limited to 'media/java')
-rw-r--r--media/java/android/media/AudioManager.java82
-rw-r--r--media/java/android/media/AudioRecord.java81
-rw-r--r--media/java/android/media/AudioService.java176
-rw-r--r--media/java/android/media/AudioSystem.java2
-rw-r--r--media/java/android/media/AudioTrack.java111
-rw-r--r--media/java/android/media/JetPlayer.java173
-rw-r--r--media/java/android/media/MediaFile.java4
-rw-r--r--media/java/android/media/MediaScanner.java11
-rw-r--r--media/java/android/media/SoundPool.java137
-rw-r--r--media/java/android/media/ToneGenerator.java66
10 files changed, 660 insertions, 183 deletions
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index f509fb5..cfdf5e3 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -39,11 +39,6 @@ public class AudioManager {
private final Context mContext;
private final Handler mHandler;
- // used to listen for updates to the sound effects settings so we don't
- // poll it for every UI sound
- private ContentObserver mContentObserver;
-
-
private static String TAG = "AudioManager";
private static boolean DEBUG = false;
private static boolean localLOGV = DEBUG || android.util.Config.LOGV;
@@ -642,7 +637,9 @@ public class AudioManager {
* <var>false</var> to turn it off
*/
public void setSpeakerphoneOn(boolean on){
- setRouting(MODE_IN_CALL, on ? ROUTE_SPEAKER : ROUTE_EARPIECE, ROUTE_ALL);
+ // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
+ // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
+ setRoutingP(MODE_INVALID, on ? ROUTE_SPEAKER: 0, ROUTE_SPEAKER);
}
/**
@@ -651,7 +648,7 @@ public class AudioManager {
* @return true if speakerphone is on, false if it's off
*/
public boolean isSpeakerphoneOn() {
- return (getRouting(MODE_IN_CALL) & ROUTE_SPEAKER) == 0 ? false : true;
+ return (getRoutingP(MODE_IN_CALL) & ROUTE_SPEAKER) == 0 ? false : true;
}
/**
@@ -661,14 +658,9 @@ public class AudioManager {
* headset; <var>false</var> to route audio to/from phone earpiece
*/
public void setBluetoothScoOn(boolean on){
- // Don't disable A2DP when turning off SCO.
- // A2DP does not affect in-call routing.
- setRouting(MODE_RINGTONE,
- on ? ROUTE_BLUETOOTH_SCO: ROUTE_SPEAKER, ROUTE_ALL & ~ROUTE_BLUETOOTH_A2DP);
- setRouting(MODE_NORMAL,
- on ? ROUTE_BLUETOOTH_SCO: ROUTE_SPEAKER, ROUTE_ALL & ~ROUTE_BLUETOOTH_A2DP);
- setRouting(MODE_IN_CALL,
- on ? ROUTE_BLUETOOTH_SCO: ROUTE_EARPIECE, ROUTE_ALL);
+ // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
+ // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
+ setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_SCO: 0, ROUTE_BLUETOOTH_SCO);
}
/**
@@ -678,7 +670,7 @@ public class AudioManager {
* false if otherwise
*/
public boolean isBluetoothScoOn() {
- return (getRouting(MODE_IN_CALL) & ROUTE_BLUETOOTH_SCO) == 0 ? false : true;
+ return (getRoutingP(MODE_IN_CALL) & ROUTE_BLUETOOTH_SCO) == 0 ? false : true;
}
/**
@@ -688,12 +680,9 @@ public class AudioManager {
* headset; <var>false</var> disable A2DP audio
*/
public void setBluetoothA2dpOn(boolean on){
- // the audio flinger chooses A2DP as a higher priority,
- // so there is no need to disable other routes.
- setRouting(MODE_RINGTONE,
- on ? ROUTE_BLUETOOTH_A2DP: 0, ROUTE_BLUETOOTH_A2DP);
- setRouting(MODE_NORMAL,
- on ? ROUTE_BLUETOOTH_A2DP: 0, ROUTE_BLUETOOTH_A2DP);
+ // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
+ // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
+ setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_A2DP: 0, ROUTE_BLUETOOTH_A2DP);
}
/**
@@ -703,7 +692,7 @@ public class AudioManager {
* false if otherwise
*/
public boolean isBluetoothA2dpOn() {
- return (getRouting(MODE_NORMAL) & ROUTE_BLUETOOTH_A2DP) == 0 ? false : true;
+ return (getRoutingP(MODE_NORMAL) & ROUTE_BLUETOOTH_A2DP) == 0 ? false : true;
}
/**
@@ -714,14 +703,9 @@ public class AudioManager {
* @hide
*/
public void setWiredHeadsetOn(boolean on){
- // A2DP has higher priority than wired headset, so headset connect/disconnect events
- // should not affect A2DP routing
- setRouting(MODE_NORMAL,
- on ? ROUTE_HEADSET : ROUTE_SPEAKER, ROUTE_ALL & ~ROUTE_BLUETOOTH_A2DP);
- setRouting(MODE_RINGTONE,
- on ? ROUTE_HEADSET | ROUTE_SPEAKER : ROUTE_SPEAKER, ROUTE_ALL & ~ROUTE_BLUETOOTH_A2DP);
- setRouting(MODE_IN_CALL,
- on ? ROUTE_HEADSET : ROUTE_EARPIECE, ROUTE_ALL);
+ // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
+ // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
+ setRoutingP(MODE_INVALID, on ? ROUTE_HEADSET: 0, ROUTE_HEADSET);
}
/**
@@ -732,7 +716,7 @@ public class AudioManager {
* @hide
*/
public boolean isWiredHeadsetOn() {
- return (getRouting(MODE_NORMAL) & ROUTE_HEADSET) == 0 ? false : true;
+ return (getRoutingP(MODE_NORMAL) & ROUTE_HEADSET) == 0 ? false : true;
}
/**
@@ -860,7 +844,11 @@ public class AudioManager {
* more of ROUTE_xxx types. Set bits indicate that route should be on
* @param mask bit vector of routes to change, created from one or more of
* ROUTE_xxx types. Unset bits indicate the route should be left unchanged
+ *
+ * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
+ * setBluetoothScoOn(), setBluetoothA2dpOn() and setWiredHeadsetOn() methods instead.
*/
+
public void setRouting(int mode, int routes, int mask) {
IAudioService service = getService();
try {
@@ -876,7 +864,10 @@ public class AudioManager {
* @param mode audio mode to get route (e.g., MODE_RINGTONE)
* @return an audio route bit vector that can be compared with ROUTE_xxx
* bits
+ * @deprecated Do not query audio routing directly, use isSpeakerphoneOn(),
+ * isBluetoothScoOn(), isBluetoothA2dpOn() and isWiredHeadsetOn() methods instead.
*/
+ @Deprecated
public int getRouting(int mode) {
IAudioService service = getService();
try {
@@ -1076,4 +1067,31 @@ public class AudioManager {
* {@hide}
*/
private IBinder mICallBack = new Binder();
+
+ /**
+ * {@hide}
+ */
+ private void setRoutingP(int mode, int routes, int mask) {
+ IAudioService service = getService();
+ try {
+ service.setRouting(mode, routes, mask);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in setRouting", e);
+ }
+ }
+
+
+ /**
+ * {@hide}
+ */
+ private int getRoutingP(int mode) {
+ IAudioService service = getService();
+ try {
+ return service.getRouting(mode);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in getRouting", e);
+ return -1;
+ }
+ }
+
}
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index a49bd67..3346bed 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -40,7 +40,7 @@ import android.util.Log;
* <p>Upon creation, an AudioRecord object initializes its associated audio buffer that it will
* fill with the new audio data. The size of this buffer, specified during the construction,
* determines how long an AudioRecord can record before "over-running" data that has not
- * been read yet. Data should be from the audio hardware in chunks of sizes inferior to
+ * been read yet. Data should be read from the audio hardware in chunks of sizes inferior to
* the total recording buffer size.
*/
public class AudioRecord
@@ -49,20 +49,20 @@ public class AudioRecord
// Constants
//--------------------
/**
- * State of an AudioRecord that was not successfully initialized upon creation
+ * indicates AudioRecord state is not successfully initialized.
*/
public static final int STATE_UNINITIALIZED = 0;
/**
- * State of an AudioRecord that is ready to be used
+ * indicates AudioRecord state is ready to be used
*/
public static final int STATE_INITIALIZED = 1;
/**
- * State of an AudioRecord this is not recording
+ * indicates AudioRecord recording state is not recording
*/
public static final int RECORDSTATE_STOPPED = 1; // matches SL_RECORDSTATE_STOPPED
/**
- * State of an AudioRecord this is recording
+ * indicates AudioRecord recording state is recording
*/
public static final int RECORDSTATE_RECORDING = 3;// matches SL_RECORDSTATE_RECORDING
@@ -94,11 +94,11 @@ public class AudioRecord
// Events:
// to keep in sync with frameworks/base/include/media/AudioRecord.h
/**
- * Event id for when the recording head has reached a previously set marker.
+ * Event id denotes when record head has reached a previously set marker.
*/
private static final int NATIVE_EVENT_MARKER = 2;
/**
- * Event id for when the previously set update period has passed during recording.
+ * Event id denotes when previously set update period has elapsed during recording.
*/
private static final int NATIVE_EVENT_NEW_POS = 3;
@@ -188,7 +188,7 @@ public class AudioRecord
*/
private int mNativeBufferSizeInBytes = 0;
-
+
//---------------------------------------------------------
// Constructor, Finalize
//--------------------
@@ -206,7 +206,9 @@ public class AudioRecord
* {@link AudioFormat#ENCODING_PCM_8BIT}
* @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
* to during the recording. New audio data can be read from this buffer in smaller chunks
- * than this size.
+ * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
+ * required buffer size for the successful creation of an AudioRecord instance. Using values
+ * smaller than getMinBufferSize() will result in an initialization failure.
* @throws java.lang.IllegalArgumentException
*/
public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat,
@@ -319,11 +321,13 @@ public class AudioRecord
mNativeBufferSizeInBytes = audioBufferSize;
}
-
-
+
+
/**
* Releases the native AudioRecord resources.
+ * The object can no longer be used and the reference should be set to null
+ * after a call to release()
*/
public void release() {
try {
@@ -334,7 +338,7 @@ public class AudioRecord
native_release();
mState = STATE_UNINITIALIZED;
}
-
+
@Override
protected void finalize() {
@@ -404,24 +408,27 @@ public class AudioRecord
public int getRecordingState() {
return mRecordingState;
}
-
+
/**
- * @return marker position in frames
+ * Returns the notification marker position expressed in frames.
*/
public int getNotificationMarkerPosition() {
return native_get_marker_pos();
}
/**
- * @return update period in frames
+ * Returns the notification update period expressed in frames.
*/
public int getPositionNotificationPeriod() {
return native_get_pos_update_period();
}
-
+
/**
* Returns the minimum buffer size required for the successful creation of an AudioRecord
* object.
+ * Note that this size doesn't guarantee a smooth recording under load, and higher values
+ * should be chosen according to the expected frequency at which the AudioRecord instance
+ * will be polled for new data.
* @param sampleRateInHz the sample rate expressed in Hertz.
* @param channelConfig describes the configuration of the audio channels.
* See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
@@ -432,7 +439,7 @@ public class AudioRecord
* hardware, or an invalid parameter was passed,
* or {@link #ERROR} if the implementation was unable to query the hardware for its
* output properties,
- * or the minimum buffer size expressed in of bytes.
+ * or the minimum buffer size expressed in bytes.
*/
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
int channelCount = 0;
@@ -516,7 +523,7 @@ public class AudioRecord
/**
* Reads audio data from the audio hardware for recording into a buffer.
* @param audioData the array to which the recorded audio data is written.
- * @param offsetInBytes index in audioData from which the data is written.
+ * @param offsetInBytes index in audioData from which the data is written expressed in bytes.
* @param sizeInBytes the number of requested bytes.
* @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
@@ -540,9 +547,9 @@ public class AudioRecord
/**
* Reads audio data from the audio hardware for recording into a buffer.
* @param audioData the array to which the recorded audio data is written.
- * @param offsetInShorts index in audioData from which the data is written.
+ * @param offsetInShorts index in audioData from which the data is written expressed in shorts.
* @param sizeInShorts the number of requested shorts.
- * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * @return the number of shorts that were read or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
* the parameters don't resolve to valid data and indexes.
* The number of shorts will not exceed sizeInShorts.
@@ -595,8 +602,15 @@ public class AudioRecord
public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener) {
setRecordPositionUpdateListener(listener, null);
}
-
+ /**
+ * Sets the listener the AudioRecord notifies when a previously set marker is reached or
+ * for each periodic record head position update.
+ * Use this method to receive AudioRecord events in the Handler associated with another
+ * thread than the one in which you created the AudioRecord instance.
+ * @param listener
+ * @param handler the Handler that will receive the event notification messages.
+ */
public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener,
Handler handler) {
synchronized (mPositionListenerLock) {
@@ -616,8 +630,8 @@ public class AudioRecord
}
}
-
-
+
+
/**
* Sets the marker position at which the listener is called, if set with
* {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
@@ -629,8 +643,8 @@ public class AudioRecord
public int setNotificationMarkerPosition(int markerInFrames) {
return native_set_marker_pos(markerInFrames);
}
-
-
+
+
/**
* Sets the period at which the listener is called, if set with
* {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
@@ -648,7 +662,9 @@ public class AudioRecord
//--------------------
/**
* Interface definition for a callback to be invoked when an AudioRecord has
- * reached a notification marker set by setNotificationMarkerPosition().
+ * reached a notification marker set by {@link AudioRecord#setNotificationMarkerPosition(int)}
+ * or for periodic updates on the progress of the record head, as set by
+ * {@link AudioRecord#setPositionNotificationPeriod(int)}.
*/
public interface OnRecordPositionUpdateListener {
/**
@@ -663,10 +679,9 @@ public class AudioRecord
*/
void onPeriodicNotification(AudioRecord recorder);
}
-
-
-
+
+
//---------------------------------------------------------
// Inner classes
//--------------------
@@ -678,12 +693,12 @@ public class AudioRecord
private class NativeEventHandler extends Handler {
private final AudioRecord mAudioRecord;
-
+
NativeEventHandler(AudioRecord recorder, Looper looper) {
super(looper);
mAudioRecord = recorder;
}
-
+
@Override
public void handleMessage(Message msg) {
OnRecordPositionUpdateListener listener = null;
@@ -779,7 +794,3 @@ public class AudioRecord
}
-
-
-
-
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 2e3e460..881de4d 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -100,6 +100,10 @@ public class AudioService extends IAudioService.Stub {
private int[] mRoutes = new int[AudioSystem.NUM_MODES];
private Object mSettingsLock = new Object();
private boolean mMediaServerOk;
+ private boolean mSpeakerIsOn;
+ private boolean mBluetoothScoIsConnected;
+ private boolean mHeadsetIsConnected;
+ private boolean mBluetoothA2dpIsConnected;
private SoundPool mSoundPool;
private Object mSoundEffectsLock = new Object();
@@ -189,6 +193,10 @@ public class AudioService extends IAudioService.Stub {
mMediaServerOk = true;
AudioSystem.setErrorCallback(mAudioSystemCallback);
loadSoundEffects();
+ mSpeakerIsOn = false;
+ mBluetoothScoIsConnected = false;
+ mHeadsetIsConnected = false;
+ mBluetoothA2dpIsConnected = false;
}
private void createAudioSystemThread() {
@@ -606,8 +614,9 @@ public class AudioService extends IAudioService.Stub {
}
synchronized (mSettingsLock) {
if (mode != mMode) {
- AudioSystem.setMode(mode);
- mMode = mode;
+ if (AudioSystem.setMode(mode) == AudioSystem.AUDIO_STATUS_OK) {
+ mMode = mode;
+ }
}
int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE);
int index = mStreamStates[streamType].mIndex;
@@ -623,18 +632,167 @@ public class AudioService extends IAudioService.Stub {
/** @see AudioManager#setRouting(int, int, int) */
public void setRouting(int mode, int routes, int mask) {
+ int incallMask = 0;
+ int ringtoneMask = 0;
+ int normalMask = 0;
+
if (!checkAudioSettingsPermission("setRouting()")) {
return;
}
synchronized (mSettingsLock) {
- if ((mRoutes[mode] & mask) != (routes & mask)) {
- AudioSystem.setRouting(mode, routes, mask);
- mRoutes[mode] = (mRoutes[mode] & ~mask) | (routes & mask);
+ // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
+ // mode AudioSystem.MODE_INVALID is used only by the following AudioManager methods:
+ // setWiredHeadsetOn(), setBluetoothA2dpOn(), setBluetoothScoOn() and setSpeakerphoneOn().
+ // If applications are using AudioManager.setRouting() that is now deprecated, the routing
+ // command will be ignored.
+ if (mode == AudioSystem.MODE_INVALID) {
+ switch (mask) {
+ case AudioSystem.ROUTE_SPEAKER:
+ // handle setSpeakerphoneOn()
+ if (routes != 0 && !mSpeakerIsOn) {
+ mSpeakerIsOn = true;
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER;
+ incallMask = AudioSystem.ROUTE_ALL;
+ } else if (mSpeakerIsOn) {
+ mSpeakerIsOn = false;
+ if (mBluetoothScoIsConnected) {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO;
+ } else if (mHeadsetIsConnected) {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET;
+ } else {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE;
+ }
+ incallMask = AudioSystem.ROUTE_ALL;
+ }
+ break;
+
+ case AudioSystem.ROUTE_BLUETOOTH_SCO:
+ // handle setBluetoothScoOn()
+ if (routes != 0 && !mBluetoothScoIsConnected) {
+ mBluetoothScoIsConnected = true;
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO;
+ mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_BLUETOOTH_SCO;
+ mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_BLUETOOTH_SCO;
+ incallMask = AudioSystem.ROUTE_ALL;
+ // A2DP has higher priority than SCO headset, so headset connect/disconnect events
+ // should not affect A2DP routing
+ ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ } else if (mBluetoothScoIsConnected) {
+ mBluetoothScoIsConnected = false;
+ if (mHeadsetIsConnected) {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET;
+ mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER);
+ mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_HEADSET;
+ } else {
+ if (mSpeakerIsOn) {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER;
+ } else {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE;
+ }
+ mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_SPEAKER;
+ mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_SPEAKER;
+ }
+ incallMask = AudioSystem.ROUTE_ALL;
+ // A2DP has higher priority than SCO headset, so headset connect/disconnect events
+ // should not affect A2DP routing
+ ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ }
+ break;
+
+ case AudioSystem.ROUTE_HEADSET:
+ // handle setWiredHeadsetOn()
+ if (routes != 0 && !mHeadsetIsConnected) {
+ mHeadsetIsConnected = true;
+ // do not act upon headset connection if bluetooth SCO is connected to match phone app behavior
+ if (!mBluetoothScoIsConnected) {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET;
+ mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER);
+ mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_HEADSET;
+ incallMask = AudioSystem.ROUTE_ALL;
+ // A2DP has higher priority than wired headset, so headset connect/disconnect events
+ // should not affect A2DP routing
+ ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ }
+ } else if (mHeadsetIsConnected) {
+ mHeadsetIsConnected = false;
+ // do not act upon headset disconnection if bluetooth SCO is connected to match phone app behavior
+ if (!mBluetoothScoIsConnected) {
+ if (mSpeakerIsOn) {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER;
+ } else {
+ mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE;
+ }
+ mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_SPEAKER;
+ mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
+ AudioSystem.ROUTE_SPEAKER;
+
+ incallMask = AudioSystem.ROUTE_ALL;
+ // A2DP has higher priority than wired headset, so headset connect/disconnect events
+ // should not affect A2DP routing
+ ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ }
+ }
+ break;
+
+ case AudioSystem.ROUTE_BLUETOOTH_A2DP:
+ // handle setBluetoothA2dpOn()
+ if (routes != 0 && !mBluetoothA2dpIsConnected) {
+ mBluetoothA2dpIsConnected = true;
+ mRoutes[AudioSystem.MODE_RINGTONE] |= AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ mRoutes[AudioSystem.MODE_NORMAL] |= AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ // the audio flinger chooses A2DP as a higher priority,
+ // so there is no need to disable other routes.
+ ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ } else if (mBluetoothA2dpIsConnected) {
+ mBluetoothA2dpIsConnected = false;
+ mRoutes[AudioSystem.MODE_RINGTONE] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ mRoutes[AudioSystem.MODE_NORMAL] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ // the audio flinger chooses A2DP as a higher priority,
+ // so there is no need to disable other routes.
+ ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ }
+ break;
+ }
+
+ // incallMask != 0 means we must apply new routing to MODE_IN_CALL mode
+ if (incallMask != 0) {
+ AudioSystem.setRouting(AudioSystem.MODE_IN_CALL,
+ mRoutes[AudioSystem.MODE_IN_CALL],
+ incallMask);
+ }
+ // ringtoneMask != 0 means we must apply new routing to MODE_RINGTONE mode
+ if (ringtoneMask != 0) {
+ AudioSystem.setRouting(AudioSystem.MODE_RINGTONE,
+ mRoutes[AudioSystem.MODE_RINGTONE],
+ ringtoneMask);
+ }
+ // normalMask != 0 means we must apply new routing to MODE_NORMAL mode
+ if (normalMask != 0) {
+ AudioSystem.setRouting(AudioSystem.MODE_NORMAL,
+ mRoutes[AudioSystem.MODE_NORMAL],
+ normalMask);
+ }
+
+ int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE);
+ int index = mStreamStates[streamType].mIndex;
+ syncRingerAndNotificationStreamVolume(streamType, index, true);
+ setStreamVolumeInt(streamType, index, true);
}
- int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE);
- int index = mStreamStates[streamType].mIndex;
- syncRingerAndNotificationStreamVolume(streamType, index, true);
- setStreamVolumeInt(streamType, index, true);
}
}
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index d0fa795..5917ab9 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -200,7 +200,7 @@ public class AudioSystem
* param error error code:
* - AUDIO_STATUS_OK
* - AUDIO_STATUS_SERVER_DIED
- * - UDIO_STATUS_ERROR
+ * - AUDIO_STATUS_ERROR
*/
void onError(int error);
};
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 4196ef3..3cd841d 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -32,22 +32,24 @@ import android.util.Log;
* It allows to stream PCM audio buffers to the audio hardware for playback. This is
* achieved by "pushing" the data to the AudioTrack object using one of the
* {@link #write(byte[], int, int)} and {@link #write(short[], int, int)} methods.
- * <p>An AudioTrack instance can operate under two modes: static of streaming.<br>
- * The Streaming mode consists in continuously writing data to the AudioTrack, using one
- * of the write() methods. These are blocking and return when the data has been transferred
- * from the Java layer to the native layer, and is queued for playback. The streaming mode
+ *
+ * <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
+ * In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using
+ * one of the write() methods. These are blocking and return when the data has been transferred
+ * from the Java layer to the native layer and queued for playback. The streaming mode
* is most useful when playing blocks of audio data that for instance are:
* <ul>
* <li>too big to fit in memory because of the duration of the sound to play,</li>
* <li>too big to fit in memory because of the characteristics of the audio data
* (high sampling rate, bits per sample ...)</li>
- * <li>chosen, received or generated as the audio keeps playing.</li>
+ * <li>received or generated while previously queued audio is playing.</li>
* </ul>
* The static mode is to be chosen when dealing with short sounds that fit in memory and
- * that need to be played with the smallest latency possible. Static mode AudioTrack instances can
- * play the sound without the need to transfer the audio data from Java to the audio hardware
+ * that need to be played with the smallest latency possible. AudioTrack instances in static mode
+ * can play the sound without the need to transfer the audio data from Java to native layer
* each time the sound is to be played. The static mode will therefore be preferred for UI and
* game sounds that are played often, and with the smallest overhead possible.
+ *
* <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
* The size of this buffer, specified during the construction, determines how long an AudioTrack
* can play before running out of data.<br>
@@ -66,11 +68,11 @@ public class AudioTrack
/** Maximum value for a channel volume */
private static final float VOLUME_MAX = 1.0f;
- /** state of an AudioTrack this is stopped */
+ /** indicates AudioTrack state is stopped */
public static final int PLAYSTATE_STOPPED = 1; // matches SL_PLAYSTATE_STOPPED
- /** state of an AudioTrack this is paused */
+ /** indicates AudioTrack state is paused */
public static final int PLAYSTATE_PAUSED = 2; // matches SL_PLAYSTATE_PAUSED
- /** state of an AudioTrack this is playing */
+ /** indicates AudioTrack state is playing */
public static final int PLAYSTATE_PLAYING = 3; // matches SL_PLAYSTATE_PLAYING
/**
@@ -85,7 +87,7 @@ public class AudioTrack
public static final int MODE_STREAM = 1;
/**
- * State of an AudioTrack that was not successfully initialized upon creation
+ * State of an AudioTrack that was not successfully initialized upon creation.
*/
public static final int STATE_UNINITIALIZED = 0;
/**
@@ -126,11 +128,11 @@ public class AudioTrack
// Events:
// to keep in sync with frameworks/base/include/media/AudioTrack.h
/**
- * Event id for when the playback head has reached a previously set marker.
+ * Event id denotes when playback head has reached a previously set marker.
*/
private static final int NATIVE_EVENT_MARKER = 3;
/**
- * Event id for when the previously set update period has passed during playback.
+ * Event id denotes when previously set update period has elapsed during playback.
*/
private static final int NATIVE_EVENT_NEW_POS = 4;
@@ -141,11 +143,11 @@ public class AudioTrack
// Member variables
//--------------------
/**
- * Indicates the state of the AudioTrack instance
+ * Indicates the state of the AudioTrack instance.
*/
private int mState = STATE_UNINITIALIZED;
/**
- * Indicates the play state of the AudioTrack instance
+ * Indicates the play state of the AudioTrack instance.
*/
private int mPlayState = PLAYSTATE_STOPPED;
/**
@@ -159,7 +161,7 @@ public class AudioTrack
*/
private OnPlaybackPositionUpdateListener mPositionListener = null;
/**
- * Lock to protect event listener updates against event notifications
+ * Lock to protect event listener updates against event notifications.
*/
private final Object mPositionListenerLock = new Object();
/**
@@ -167,11 +169,11 @@ public class AudioTrack
*/
private int mNativeBufferSizeInBytes = 0;
/**
- * Handler for marker events coming from the native code
+ * Handler for marker events coming from the native code.
*/
private NativeEventHandlerDelegate mEventHandlerDelegate = null;
/**
- * Looper associated with the thread that creates the AudioTrack instance
+ * Looper associated with the thread that creates the AudioTrack instance.
*/
private Looper mInitializationLooper = null;
/**
@@ -179,7 +181,7 @@ public class AudioTrack
*/
private int mSampleRate = 22050;
/**
- * The number of input audio channels (1 is mono, 2 is stereo)
+ * The number of input audio channels (1 is mono, 2 is stereo).
*/
private int mChannelCount = 1;
/**
@@ -194,7 +196,7 @@ public class AudioTrack
*/
private int mDataLoadMode = MODE_STREAM;
/**
- * The current audio channel configuration
+ * The current audio channel configuration.
*/
private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
/**
@@ -209,7 +211,7 @@ public class AudioTrack
// Used exclusively by native code
//--------------------
/**
- * Accessed by native methods: provides access to C++ AudioTrack object
+ * Accessed by native methods: provides access to C++ AudioTrack object.
*/
@SuppressWarnings("unused")
private int mNativeTrackInJavaObj;
@@ -227,17 +229,14 @@ public class AudioTrack
/**
* Class constructor.
* @param streamType the type of the audio stream. See
-
* {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
* {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC} and
* {@link AudioManager#STREAM_ALARM}
* @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but
* not limited to) 44100, 22050 and 11025.
* @param channelConfig describes the configuration of the audio channels.
-
* See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
* {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}
-
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
@@ -245,6 +244,9 @@ public class AudioTrack
* from for playback. If using the AudioTrack in streaming mode, you can write data into
* this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
* this is the maximum size of the sound that will be played for this instance.
+ * See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
+ * for the successful creation of an AudioTrack instance in streaming mode. Using values
+ * smaller than getMinBufferSize() will result in an initialization failure.
* @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
* @throws java.lang.IllegalArgumentException
*/
@@ -423,8 +425,8 @@ public class AudioTrack
}
/**
- * Returns the current playback rate in Hz. Note that this rate may differ from one set using
- * {@link #setPlaybackRate(int)} as the value effectively set is implementation-dependent.
+ * Returns the current playback rate in Hz. Note that this rate may differ from the one set
+ * with {@link #setPlaybackRate(int)} as the value effectively used is implementation-dependent.
*/
public int getPlaybackRate() {
return native_get_playback_rate();
@@ -470,6 +472,9 @@ public class AudioTrack
* AudioTrack instance has been created to check if it was initialized
* properly. This ensures that the appropriate hardware resources have been
* acquired.
+ * @see #STATE_INITIALIZED
+ * @see #STATE_NO_STATIC_DATA
+ * @see #STATE_UNINITIALIZED
*/
public int getState() {
return mState;
@@ -486,28 +491,28 @@ public class AudioTrack
}
/**
- * Returns the native frame count used by the hardware
+ * Returns the native frame count used by the hardware.
*/
protected int getNativeFrameCount() {
return native_get_native_frame_count();
}
/**
- * @return marker position in frames
+ * Returns marker position expressed in frames.
*/
public int getNotificationMarkerPosition() {
return native_get_marker_pos();
}
/**
- * @return update period in frames
+ * Returns the notification update period expressed in frames.
*/
public int getPositionNotificationPeriod() {
return native_get_pos_update_period();
}
/**
- * @return playback head position in frames
+ * Returns the playback head position expressed in frames.
*/
public int getPlaybackHeadPosition() {
return native_get_position();
@@ -522,7 +527,9 @@ public class AudioTrack
/**
* Returns the minimum buffer size required for the successful creation of an AudioTrack
- * object to be created in the {@link #MODE_STREAM} mode.
+ * object to be created in the {@link #MODE_STREAM} mode. Note that this size doesn't
+ * guarantee a smooth playback under load, and higher values should be chosen according to
+ * the expected frequency at which the buffer will be refilled with additional data to play.
* @param sampleRateInHz the sample rate expressed in Hertz.
* @param channelConfig describes the configuration of the audio channels.
* See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
@@ -533,7 +540,7 @@ public class AudioTrack
* @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
* or {@link #ERROR} if the implementation was unable to query the hardware for its output
* properties,
- * or the minimum buffer size expressed in number of bytes.
+ * or the minimum buffer size expressed in bytes.
*/
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
int channelCount = 0;
@@ -577,13 +584,22 @@ public class AudioTrack
/**
* Sets the listener the AudioTrack notifies when a previously set marker is reached or
* for each periodic playback head position update.
+ * Notifications will be received in the same thread as the one in which the AudioTrack
+ * instance was created.
* @param listener
*/
public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
setPlaybackPositionUpdateListener(listener, null);
}
-
+ /**
+ * Sets the listener the AudioTrack notifies when a previously set marker is reached or
+ * for each periodic playback head position update.
+ * Use this method to receive AudioTrack events in the Handler associated with another
+ * thread than the one in which you created the AudioTrack instance.
+ * @param listener
+ * @param handler the Handler that will receive the event notification messages.
+ */
public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
Handler handler) {
synchronized (mPositionListenerLock) {
@@ -636,13 +652,17 @@ public class AudioTrack
* the audio data will be consumed and played back, not the original sampling rate of the
* content. Setting it to half the sample rate of the content will cause the playback to
* last twice as long, but will also result result in a negative pitch shift.
- * The current implementation supports a maximum sample rate of twice the hardware output
- * sample rate (see {@link #getNativeOutputSampleRate(int)}). Use {@link #getSampleRate()} to
- * check the rate actually used in hardware after potential clamping.
- * @param sampleRateInHz
+ * The current implementation supports a maximum sample rate of 64kHz.
+ * Use {@link #getSampleRate()} to check the rate actually used in hardware after
+ * potential clamping.
+ * @param sampleRateInHz the sample rate expressed in Hz
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
+ // FIXME: the implementation should support twice the hardware output sample rate
+ // (see {@link #getNativeOutputSampleRate(int)}), but currently
+ // due to the representation of the sample rate in the native layer, the sample rate
+ // is limited to 65535Hz
public int setPlaybackRate(int sampleRateInHz) {
if (mState != STATE_INITIALIZED) {
return ERROR_INVALID_OPERATION;
@@ -656,7 +676,7 @@ public class AudioTrack
/**
- *
+ * Sets the position of the notification marker.
* @param markerInFrames marker in frames
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
@@ -670,7 +690,8 @@ public class AudioTrack
/**
- * @param periodInFrames update period in frames
+ * Sets the period for the periodic notification event.
+ * @param periodInFrames update period expressed in frames
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
*/
public int setPositionNotificationPeriod(int periodInFrames) {
@@ -683,7 +704,7 @@ public class AudioTrack
/**
* Sets the playback head position. The track must be stopped for the position to be changed.
- * @param positionInFrames playback head position in frames
+ * @param positionInFrames playback head position expressed in frames
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -699,8 +720,8 @@ public class AudioTrack
/**
* Sets the loop points and the loop count. The loop can be infinite.
- * @param startInFrames loop start marker in frames
- * @param endInFrames loop end marker in frames
+ * @param startInFrames loop start marker expressed in frames
+ * @param endInFrames loop end marker expressed in frames
* @param loopCount the number of times the loop is looped.
* A value of -1 means infinite looping.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
@@ -797,7 +818,8 @@ public class AudioTrack
/**
* Writes the audio data to the audio hardware for playback.
* @param audioData the array that holds the data to play.
- * @param offsetInBytes the offset in audioData where the data to play starts.
+ * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
+ * starts.
* @param sizeInBytes the number of bytes to read in audioData after the offset.
* @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
@@ -827,7 +849,8 @@ public class AudioTrack
/**
* Writes the audio data to the audio hardware for playback.
* @param audioData the array that holds the data to play.
- * @param offsetInShorts the offset in audioData where the data to play starts.
+ * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
+ * starts.
* @param sizeInShorts the number of bytes to read in audioData after the offset.
* @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
* if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java
index c9efac5..4fb0ead 100644
--- a/media/java/android/media/JetPlayer.java
+++ b/media/java/android/media/JetPlayer.java
@@ -30,8 +30,29 @@ import android.util.Log;
/**
* JetPlayer provides access to JET content playback and control.
- * <p>
- * Use <code>JetPlayer.getJetPlayer()</code> to get an instance of this class.
+ *
+ * <p>Please refer to the JET Creator User Manual for a presentation of the JET interactive
+ * music concept and how to use the JetCreator tool to create content to be played by JetPlayer.
+ *
+ * <p>Use of the JetPlayer class is based around the playback of a number of JET segments
+ * sequentially added to a playback FIFO queue. The rendering of the MIDI content stored in each
+ * segment can be dynamically affected by two mechanisms:
+ * <ul>
+ * <li>tracks in a segment can be muted or unmuted at any moment, individually or through
+ * a mask (to change the mute state of multiple tracks at once)</li>
+ * <li>parts of tracks in a segment can be played at predefined points in the segment, in order
+ * to maintain synchronization with the other tracks in the segment. This is achieved through
+ * the notion of "clips", which can be triggered at any time, but that will play only at the
+ * right time, as authored in the corresponding JET file.</li>
+ * </ul>
+ * As a result of the rendering and playback of the JET segments, the user of the JetPlayer instance
+ * can receive notifications from the JET engine relative to:
+ * <ul>
+ * <li>the playback state,</li>
+ * <li>the number of segments left to play in the queue,</li>
+ * <li>application controller events (CC80-83) to mark points in the MIDI segments.</li>
+ * </ul>
+ * Use {@link #getJetPlayer()} to construct a JetPlayer instance. JetPlayer is a singleton class.
*
*/
public class JetPlayer
@@ -40,7 +61,7 @@ public class JetPlayer
// Constants
//------------------------
/**
- * The maximum number of simultaneous tracks. Use __link #getMaxTracks()} to
+ * The maximum number of simultaneous tracks. Use {@link #getMaxTracks()} to
* access this value.
*/
private static int MAXTRACKS = 32;
@@ -107,6 +128,10 @@ public class JetPlayer
//--------------------------------------------
// Constructor, finalize
//------------------------
+ /**
+ * Factory method for the JetPlayer class.
+ * @return the singleton JetPlayer instance
+ */
public static JetPlayer getJetPlayer() {
if (singletonRef == null) {
singletonRef = new JetPlayer();
@@ -114,7 +139,9 @@ public class JetPlayer
return singletonRef;
}
-
+ /**
+ * Cloning a JetPlayer instance is not supported. Calling clone() will generate an exception.
+ */
public Object clone() throws CloneNotSupportedException {
// JetPlayer is a singleton class,
// so you can't clone a JetPlayer instance
@@ -149,6 +176,11 @@ public class JetPlayer
}
+ /**
+ * Stops the current JET playback, and releases all associated native resources.
+ * The object can no longer be used and the reference should be set to null
+ * after a call to release().
+ */
public void release() {
native_release();
}
@@ -158,7 +190,7 @@ public class JetPlayer
// Getters
//------------------------
/**
- * Returns the maximum number of simultaneous MIDI tracks supported by the Jet player
+ * Returns the maximum number of simultaneous MIDI tracks supported by JetPlayer.
*/
public static int getMaxTracks() {
return JetPlayer.MAXTRACKS;
@@ -168,11 +200,21 @@ public class JetPlayer
//--------------------------------------------
// Jet functionality
//------------------------
+ /**
+ * Loads a .jet file from a given path.
+ * @param path the path to the .jet file, for instance "/sdcard/mygame/music.jet".
+ * @return true if loading the .jet file was successful, false if loading failed.
+ */
public boolean loadJetFile(String path) {
return native_loadJetFromFile(path);
}
+ /**
+ * Loads a .jet file from an asset file descriptor.
+ * @param afd the asset file descriptor.
+ * @return true if loading the .jet file was successful, false if loading failed.
+ */
public boolean loadJetFile(AssetFileDescriptor afd) {
long len = afd.getLength();
if (len < 0) {
@@ -182,22 +224,54 @@ public class JetPlayer
afd.getFileDescriptor(), afd.getStartOffset(), len);
}
-
+ /**
+ * Closes the resource containing the JET content.
+ * @return true if successfully closed, false otherwise.
+ */
public boolean closeJetFile() {
return native_closeJetFile();
}
+ /**
+ * Starts playing the JET segment queue.
+ * @return true if rendering and playback is successfully started, false otherwise.
+ */
public boolean play() {
return native_playJet();
}
+ /**
+ * Pauses the playback of the JET segment queue.
+ * @return true if rendering and playback is successfully paused, false otherwise.
+ */
public boolean pause() {
return native_pauseJet();
}
+ /**
+ * Queues the specified segment in the JET queue.
+ * @param segmentNum the identifier of the segment.
+ * @param libNum the index of the sound bank associated with the segment. Use -1 to indicate
+ * that no sound bank (DLS file) is associated with this segment, in which case JET will use
+ * the General MIDI library.
+ * @param repeatCount the number of times the segment will be repeated. 0 means the segment will
+ * only play once. -1 means the segment will repeat indefinitely.
+ * @param transpose the amount of pitch transposition. Set to 0 for normal playback.
+ * Range is -12 to +12.
+ * @param muteFlags a bitmask to specify which MIDI tracks will be muted during playback. Bit 0
+ * affects track 0, bit 1 affects track 1 etc.
+ * @param userID a value specified by the application that uniquely identifies the segment.
+ * This value is received in the
+ * {@link OnJetEventListener#onJetUserIdUpdate(JetPlayer, int, int)} event listener method.
+ * Normally, the application will keep a byte value that is incremented each time a new
+ * segment is queued up. This can be used to look up any special characteristics of that
+ * track including trigger clips and mute flags.
+ * @return true if the segment was successfully queued, false if the queue is full or if the
+ * parameters are invalid.
+ */
public boolean queueJetSegment(int segmentNum, int libNum, int repeatCount,
int transpose, int muteFlags, byte userID) {
return native_queueJetSegment(segmentNum, libNum, repeatCount,
@@ -205,6 +279,28 @@ public class JetPlayer
}
+ /**
+ * Queues the specified segment in the JET queue.
+ * @param segmentNum the identifier of the segment.
+ * @param libNum the index of the soundbank associated with the segment. Use -1 to indicate that
+ * no sound bank (DLS file) is associated with this segment, in which case JET will use
+ * the General MIDI library.
+ * @param repeatCount the number of times the segment will be repeated. 0 means the segment will
+ * only play once. -1 means the segment will repeat indefinitely.
+ * @param transpose the amount of pitch transposition. Set to 0 for normal playback.
+ * Range is -12 to +12.
+ * @param muteArray an array of booleans to specify which MIDI tracks will be muted during
+ * playback. The value at index 0 affects track 0, value at index 1 affects track 1 etc.
+ * The length of the array must be {@link #getMaxTracks()} for the call to succeed.
+ * @param userID a value specified by the application that uniquely identifies the segment.
+ * This value is received in the
+ * {@link OnJetEventListener#onJetUserIdUpdate(JetPlayer, int, int)} event listener method.
+ * Normally, the application will keep a byte value that is incremented each time a new
+ * segment is queued up. This can be used to look up any special characteristics of that
+ * track including trigger clips and mute flags.
+ * @return true if the segment was successfully queued, false if the queue is full or if the
+ * parameters are invalid.
+ */
public boolean queueJetSegmentMuteArray(int segmentNum, int libNum, int repeatCount,
int transpose, boolean[] muteArray, byte userID) {
if (muteArray.length != JetPlayer.getMaxTracks()) {
@@ -215,11 +311,32 @@ public class JetPlayer
}
+ /**
+ * Modifies the mute flags.
+ * @param muteFlags a bitmask to specify which MIDI tracks are muted. Bit 0 affects track 0,
+ * bit 1 affects track 1 etc.
+ * @param sync if false, the new mute flags will be applied as soon as possible by the JET
+ * render and playback engine. If true, the mute flags will be updated at the start of the
+ * next segment. If the segment is repeated, the flags will take effect the next time
+ * the segment is repeated.
+ * @return true if the mute flags were successfully updated, false otherwise.
+ */
public boolean setMuteFlags(int muteFlags, boolean sync) {
return native_setMuteFlags(muteFlags, sync);
}
+ /**
+ * Modifies the mute flags for the current active segment.
+ * @param muteArray an array of booleans to specify which MIDI tracks are muted. The value at
+ * index 0 affects track 0, value at index 1 affects track 1 etc.
+ * The length of the array must be {@link #getMaxTracks()} for the call to succeed.
+ * @param sync if false, the new mute flags will be applied as soon as possible by the JET
+ * render and playback engine. If true, the mute flags will be updated at the start of the
+ * next segment. If the segment is repeated, the flags will take effect the next time
+ * the segment is repeated.
+ * @return true if the mute flags were successfully updated, false otherwise.
+ */
public boolean setMuteArray(boolean[] muteArray, boolean sync) {
if(muteArray.length != JetPlayer.getMaxTracks())
return false;
@@ -227,16 +344,41 @@ public class JetPlayer
}
+ /**
+ * Mutes or unmutes a single track.
+ * @param trackId the index of the track to mute.
+ * @param muteFlag set to true to mute, false to unmute.
+ * @param sync if false, the new mute flags will be applied as soon as possible by the JET
+ * render and playback engine. If true, the mute flag will be updated at the start of the
+ * next segment. If the segment is repeated, the flag will take effect the next time
+ * the segment is repeated.
+ * @return true if the mute flag was successfully updated, false otherwise.
+ */
public boolean setMuteFlag(int trackId, boolean muteFlag, boolean sync) {
return native_setMuteFlag(trackId, muteFlag, sync);
}
+ /**
+ * Schedules the playback of a clip.
+ * This will automatically update the mute flags in sync with the JET Clip Marker (controller
+ * 103). The parameter clipID must be in the range of 0-63. After the call to triggerClip, when
+ * JET next encounters a controller event 103 with bits 0-5 of the value equal to clipID and
+ * bit 6 set to 1, it will automatically unmute the track containing the controller event.
+ * When JET encounters the complementary controller event 103 with bits 0-5 of the value equal
+ * to clipID and bit 6 set to 0, it will mute the track again.
+ * @param clipId the identifier of the clip to trigger.
+ * @return true if the clip was successfully triggered, false otherwise.
+ */
public boolean triggerClip(int clipId) {
return native_triggerClip(clipId);
}
+ /**
+ * Empties the segment queue, and clears all clips that are scheduled for playback.
+ * @return true if the queue was successfully cleared, false otherwise.
+ */
public boolean clearQueue() {
return native_clearQueue();
}
@@ -302,10 +444,25 @@ public class JetPlayer
//--------------------------------------------
// Jet event listener
//------------------------
+ /**
+ * Sets the listener JetPlayer notifies when a JET event is generated by the rendering and
+ * playback engine.
+ * Notifications will be received in the same thread as the one in which the JetPlayer
+ * instance was created.
+ * @param listener
+ */
public void setEventListener(OnJetEventListener listener) {
setEventListener(listener, null);
}
+ /**
+ * Sets the listener JetPlayer notifies when a JET event is generated by the rendering and
+ * playback engine.
+ * Use this method to receive JET events in the Handler associated with another
+ * thread than the one in which you created the JetPlayer instance.
+ * @param listener
+ * @param handler the Handler that will receive the event notification messages.
+ */
public void setEventListener(OnJetEventListener listener, Handler handler) {
synchronized(mEventListenerLock) {
@@ -343,7 +500,7 @@ public class JetPlayer
void onJetEvent(JetPlayer player,
short segment, byte track, byte channel, byte controller, byte value);
/**
- * Callback for when JET's currently playing segment userID is updated.
+ * Callback for when JET's currently playing segment's userID is updated.
*
* @param player the JET player the status update is coming from
* @param userId the ID of the currently playing segment
@@ -363,7 +520,7 @@ public class JetPlayer
* Callback for when JET pause state is updated.
*
* @param player the JET player the status update is coming from
- * @param paused indicates whether JET is paused or not
+ * @param paused indicates whether JET is paused (1) or not (0)
*/
void onJetPauseUpdate(JetPlayer player, int paused);
}
diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java
index f05842d..8be11df 100644
--- a/media/java/android/media/MediaFile.java
+++ b/media/java/android/media/MediaFile.java
@@ -31,7 +31,7 @@ import java.util.Iterator;
*/
public class MediaFile {
// comma separated list of all file extensions supported by the media scanner
- public static String sFileExtensions;
+ public final static String sFileExtensions;
// Audio file types
public static final int FILE_TYPE_MP3 = 1;
@@ -93,7 +93,7 @@ public class MediaFile {
= new HashMap<String, Integer>();
static void addFileType(String extension, int fileType, String mimeType) {
sFileTypeMap.put(extension, new MediaFileType(fileType, mimeType));
- sMimeTypeMap.put(mimeType, new Integer(fileType));
+ sMimeTypeMap.put(mimeType, Integer.valueOf(fileType));
}
static {
addFileType("MP3", FILE_TYPE_MP3, "audio/mpeg");
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index fc8476d..ae3e181 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -453,11 +453,12 @@ public class MediaScanner
FileCacheEntry entry = beginFile(path, mimeType, lastModified, fileSize);
// rescan for metadata if file was modified since last scan
if (entry != null && (entry.mLastModifiedChanged || scanAlways)) {
- boolean ringtones = (path.indexOf(RINGTONES_DIR) > 0);
- boolean notifications = (path.indexOf(NOTIFICATIONS_DIR) > 0);
- boolean alarms = (path.indexOf(ALARMS_DIR) > 0);
- boolean podcasts = (path.indexOf(PODCAST_DIR) > 0);
- boolean music = (path.indexOf(MUSIC_DIR) > 0) ||
+ String lowpath = path.toLowerCase();
+ boolean ringtones = (lowpath.indexOf(RINGTONES_DIR) > 0);
+ boolean notifications = (lowpath.indexOf(NOTIFICATIONS_DIR) > 0);
+ boolean alarms = (lowpath.indexOf(ALARMS_DIR) > 0);
+ boolean podcasts = (lowpath.indexOf(PODCAST_DIR) > 0);
+ boolean music = (lowpath.indexOf(MUSIC_DIR) > 0) ||
(!ringtones && !notifications && !alarms && !podcasts);
if (mFileType == MediaFile.FILE_TYPE_MP3 ||
diff --git a/media/java/android/media/SoundPool.java b/media/java/android/media/SoundPool.java
index ab3274b..3803d9d 100644
--- a/media/java/android/media/SoundPool.java
+++ b/media/java/android/media/SoundPool.java
@@ -46,6 +46,19 @@ import java.io.IOException;
* number of streams helps to cap CPU loading and reducing the likelihood that
* audio mixing will impact visuals or UI performance.</p>
*
+ * <p>Sounds can be looped by setting a non-zero loop value. A value of -1
+ * causes the sound to loop forever. In this case, the application must
+ * explicitly call the stop() function to stop the sound. Any other non-zero
+ * value will cause the sound to repeat the specified number of times, e.g.
+ * a value of 3 causes the sound to play a total of 4 times.</p>
+ *
+ * <p>The playback rate can also be changed. A playback rate of 1.0 causes
+ * the sound to play at its original frequency (resampled, if necessary,
+ * to the hardware output frequency). A playback rate of 2.0 causes the
+ * sound to play at twice its original frequency, and a playback rate of
+ * 0.5 causes it to play at half its original frequency. The playback
+ * rate range is 0.5 to 2.0.</p>
+ *
* <p>Priority runs low to high, i.e. higher numbers are higher priority.
* Priority is used when a call to play() would cause the number of active
* streams to exceed the value established by the maxStreams parameter when
@@ -72,6 +85,13 @@ import java.io.IOException;
* adjusting the playback rate in real-time for doppler or synthesis
* effects.</p>
*
+ * <p>Note that since streams can be stopped due to resource constraints, the
+ * streamID is a reference to a particular instance of a stream. If the stream
+ * is stopped to allow a higher priority stream to play, the stream is no
+ * longer valid. However, the application is allowed to call methods on
+ * the streamID without error. This may help simplify program logic since
+ * the application need not concern itself with the stream lifecycle.</p>
+ *
* <p>In our example, when the player has completed the level, the game
* logic should call SoundPool.release() to release all the native resources
* in use and then set the SoundPool reference to null. If the player starts
@@ -104,10 +124,11 @@ public class SoundPool
}
/**
- * Load the sound from the specified path
- *
+ * Load the sound from the specified path.
+ *
* @param path the path to the audio file
- * @param priority the priority of the sound. Currently has no effect.
+ * @param priority the priority of the sound. Currently has no effect. Use
+ * a value of 1 for future compatibility.
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(String path, int priority)
@@ -128,22 +149,25 @@ public class SoundPool
fd.close();
}
}
- } catch (java.io.IOException e) {}
+ } catch (java.io.IOException e) {
+ Log.d(TAG, "error loading " + path);
+ }
return id;
}
/**
- * Load the sound from the specified APK resource
+ * Load the sound from the specified APK resource.
*
- * <p>Note that the extension is dropped. For example, if you want to load
+ * Note that the extension is dropped. For example, if you want to load
* a sound from the raw resource file "explosion.mp3", you would specify
* "R.raw.explosion" as the resource ID. Note that this means you cannot
* have both an "explosion.wav" and an "explosion.mp3" in the res/raw
- * directory.</p>
+ * directory.
*
* @param context the application context
* @param resId the resource ID
- * @param priority the priority of the sound. Currently has no effect.
+ * @param priority the priority of the sound. Currently has no effect. Use
+ * a value of 1 for future compatibility.
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(Context context, int resId, int priority) {
@@ -162,10 +186,11 @@ public class SoundPool
}
/**
- * Load the sound from an asset file descriptor
+ * Load the sound from an asset file descriptor.
*
* @param afd an asset file descriptor
- * @param priority the priority of the sound. Currently has no effect.
+ * @param priority the priority of the sound. Currently has no effect. Use
+ * a value of 1 for future compatibility.
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(AssetFileDescriptor afd, int priority) {
@@ -181,16 +206,17 @@ public class SoundPool
}
/**
- * Load the sound from a FileDescriptor
+ * Load the sound from a FileDescriptor.
*
- * <p>This version is useful if you store multiple sounds in a single
+ * This version is useful if you store multiple sounds in a single
* binary. The offset specifies the offset from the start of the file
- * and the length specifies the length of the sound within the file.</p>
+ * and the length specifies the length of the sound within the file.
*
* @param fd a FileDescriptor object
* @param offset offset to the start of the sound
* @param length length of the sound
- * @param priority the priority of the sound. Currently has no effect.
+ * @param priority the priority of the sound. Currently has no effect. Use
+ * a value of 1 for future compatibility.
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(FileDescriptor fd, long offset, long length, int priority) {
@@ -202,11 +228,11 @@ public class SoundPool
private native final int _load(FileDescriptor fd, long offset, long length, int priority);
/**
- * Unload a sound from a sound ID
+ * Unload a sound from a sound ID.
*
- * <p>Unloads the sound specified by the soundID. This is the value
+ * Unloads the sound specified by the soundID. This is the value
* returned by the load() function. Returns true if the sound is
- * successfully unloaded, false if the sound was already unloaded.</p>
+ * successfully unloaded, false if the sound was already unloaded.
*
* @param soundID a soundID returned by the load() function
* @return true if just unloaded, false if previously unloaded
@@ -214,66 +240,77 @@ public class SoundPool
public native final boolean unload(int soundID);
/**
- * Play a sound from a sound ID
+ * Play a sound from a sound ID.
*
- * <p>Play the sound specified by the soundID. This is the value
+ * Play the sound specified by the soundID. This is the value
* returned by the load() function. Returns a non-zero streamID
* if successful, zero if it fails. The streamID can be used to
* further control playback. Note that calling play() may cause
* another sound to stop playing if the maximum number of active
- * streams is exceeded.</p>
+ * streams is exceeded. A loop value of -1 means loop forever,
+ * a value of 0 means don't loop, other values indicate the
+ * number of repeats, e.g. a value of 1 plays the audio twice.
+ * The playback rate allows the application to vary the playback
+ * rate (pitch) of the sound. A value of 1.0 means play back at
+ * the original frequency. A value of 2.0 means play back twice
+ * as fast, and a value of 0.5 means playback at half speed.
*
* @param soundID a soundID returned by the load() function
+ * @param leftVolume left volume value (range = 0.0 to 1.0)
+ * @param rightVolume right volume value (range = 0.0 to 1.0)
+ * @param priority stream priority (0 = lowest priority)
+ * @param loop loop mode (0 = no loop, -1 = loop forever)
+ * @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
* @return non-zero streamID if successful, zero if failed
*/
public native final int play(int soundID, float leftVolume, float rightVolume,
int priority, int loop, float rate);
/**
- * Pause a playback stream
+ * Pause a playback stream.
*
- * <p>Pause the stream specified by the streamID. This is the
+ * Pause the stream specified by the streamID. This is the
* value returned by the play() function. If the stream is
* playing, it will be paused. If the stream is not playing
* (e.g. is stopped or was previously paused), calling this
- * function will have no effect.</p>
+ * function will have no effect.
*
* @param streamID a streamID returned by the play() function
*/
public native final void pause(int streamID);
/**
- * Resume a playback stream
+ * Resume a playback stream.
*
- * <p>Resume the stream specified by the streamID. This
+ * Resume the stream specified by the streamID. This
* is the value returned by the play() function. If the stream
* is paused, this will resume playback. If the stream was not
- * previously paused, calling this function will have no effect.</p>
+ * previously paused, calling this function will have no effect.
*
* @param streamID a streamID returned by the play() function
*/
public native final void resume(int streamID);
/**
- * Stop a playback stream
+ * Stop a playback stream.
*
- * <p>Stop the stream specified by the streamID. This
+ * Stop the stream specified by the streamID. This
* is the value returned by the play() function. If the stream
* is playing, it will be stopped. It also releases any native
* resources associated with this stream. If the stream is not
- * playing, it will have no effect.</p>
+ * playing, it will have no effect.
*
* @param streamID a streamID returned by the play() function
*/
public native final void stop(int streamID);
/**
- * Set stream volume
+ * Set stream volume.
*
- * <p>Sets the volume on the stream specified by the streamID.
+ * Sets the volume on the stream specified by the streamID.
* This is the value returned by the play() function. The
* value must be in the range of 0.0 to 1.0. If the stream does
- * not exist, it will have no effect.</p>
+ * not exist, it will have no effect.
*
* @param streamID a streamID returned by the play() function
* @param leftVolume left volume value (range = 0.0 to 1.0)
@@ -283,29 +320,51 @@ public class SoundPool
float leftVolume, float rightVolume);
/**
- * Change stream priority
+ * Change stream priority.
*
- * <p>Change the priority of the stream specified by the streamID.
+ * Change the priority of the stream specified by the streamID.
* This is the value returned by the play() function. Affects the
- * order in which streams are re-used to play new sounds.
+ * order in which streams are re-used to play new sounds. If the
+ * stream does not exist, it will have no effect.
*
* @param streamID a streamID returned by the play() function
*/
public native final void setPriority(int streamID, int priority);
/**
- * Change stream priority
+ * Set loop mode.
*
- * <p>Change the priority of the stream specified by the streamID.
- * This is the value returned by the play() function. Affects the
- * order in which streams are re-used to play new sounds.
+ * Change the loop mode. A loop value of -1 means loop forever,
+ * a value of 0 means don't loop, other values indicate the
+ * number of repeats, e.g. a value of 1 plays the audio twice.
+ * If the stream does not exist, it will have no effect.
*
* @param streamID a streamID returned by the play() function
+ * @param loop loop mode (0 = no loop, -1 = loop forever)
*/
public native final void setLoop(int streamID, int loop);
+ /**
+ * Change playback rate.
+ *
+ * The playback rate allows the application to vary the playback
+ * rate (pitch) of the sound. A value of 1.0 means playback at
+ * the original frequency. A value of 2.0 means playback twice
+ * as fast, and a value of 0.5 means playback at half speed.
+ * If the stream does not exist, it will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ * @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
+ */
public native final void setRate(int streamID, float rate);
+ /**
+ * Release the SoundPool resources.
+ *
+ * Release all memory and native resources used by the SoundPool
+ * object. The SoundPool can no longer be used and the reference
+ * should be set to null.
+ */
public native final void release();
private native final void native_setup(Object mediaplayer_this,
diff --git a/media/java/android/media/ToneGenerator.java b/media/java/android/media/ToneGenerator.java
index 0901fbf..4b53756 100644
--- a/media/java/android/media/ToneGenerator.java
+++ b/media/java/android/media/ToneGenerator.java
@@ -130,25 +130,35 @@ public class ToneGenerator
*/
public static final int TONE_DTMF_D = 15;
/**
- * Call supervisory tone, Dial tone: 425Hz, continuous
- *
+ * Call supervisory tone, Dial tone:
+ * CEPT: 425Hz, continuous
+ * ANSI (IS-95): 350Hz+440Hz, continuous
+ * JAPAN: 400Hz, continuous
+ *
* @see #ToneGenerator(int, int)
*/
public static final int TONE_SUP_DIAL = 16;
/**
- * Call supervisory tone, Busy: 425Hz, 500ms ON, 500ms OFF...
- *
+ * Call supervisory tone, Busy:
+ * CEPT: 425Hz, 500ms ON, 500ms OFF...
+ * ANSI (IS-95): 480Hz+620Hz, 500ms ON, 500ms OFF...
+ * JAPAN: 400Hz, 500ms ON, 500ms OFF...
+ *
* @see #ToneGenerator(int, int)
*/
public static final int TONE_SUP_BUSY = 17;
/**
- * Call supervisory tone, Congestion: 425Hz, 200ms ON, 200ms OFF...
+ * Call supervisory tone, Congestion:
+ * CEPT, JAPAN: 425Hz, 200ms ON, 200ms OFF...
+ * ANSI (IS-95): 480Hz+620Hz, 250ms ON, 250ms OFF...
*
* @see #ToneGenerator(int, int)
*/
public static final int TONE_SUP_CONGESTION = 18;
/**
- * Call supervisory tone, Radio path acknowlegment : 425Hz, 200ms ON
+ * Call supervisory tone, Radio path acknowlegment :
+ * CEPT, ANSI: 425Hz, 200ms ON
+ * JAPAN: 400Hz, 1s ON, 2s OFF...
*
* @see #ToneGenerator(int, int)
*/
@@ -166,13 +176,17 @@ public class ToneGenerator
*/
public static final int TONE_SUP_ERROR = 21;
/**
- * Call supervisory tone, Call Waiting: 425Hz, 200ms ON, 600ms OFF, 200ms ON, 3s OFF...
+ * Call supervisory tone, Call Waiting:
+ * CEPT, JAPAN: 425Hz, 200ms ON, 600ms OFF, 200ms ON, 3s OFF...
+ * ANSI (IS-95): 440 Hz, 300 ms ON, 9.7 s OFF, (100 ms ON, 100 ms OFF, 100 ms ON, 9.7s OFF ...)
*
* @see #ToneGenerator(int, int)
*/
public static final int TONE_SUP_CALL_WAITING = 22;
/**
- * Call supervisory tone, Ring Tone: 425Hz, 1s ON, 4s OFF...
+ * Call supervisory tone, Ring Tone:
+ * CEPT, JAPAN: 425Hz, 1s ON, 4s OFF...
+ * ANSI (IS-95): 440Hz + 480Hz, 2s ON, 4s OFF...
*
* @see #ToneGenerator(int, int)
*/
@@ -207,6 +221,37 @@ public class ToneGenerator
* @see #ToneGenerator(int, int)
*/
public static final int TONE_PROP_BEEP2 = 28;
+ /**
+ * Call supervisory tone (IS-95), intercept tone: alternating 440 Hz and 620 Hz tones, each on for 250 ms
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_INTERCEPT = 29;
+ /**
+ * Call supervisory tone (IS-95), abbreviated intercept: intercept tone limited to 4 seconds
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_INTERCEPT_ABBREV = 30;
+ /**
+ * Call supervisory tone (IS-95), abbreviated congestion: congestion tone limited to 4 seconds
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_CONGESTION_ABBREV = 31;
+ /**
+ * Call supervisory tone (IS-95), confirm tone: a 350 Hz tone added to a 440 Hz tone repeated 3 times in a 100 ms on, 100 ms off cycle
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_CONFIRM = 32;
+ /**
+ * Call supervisory tone (IS-95), pip tone: four bursts of 480 Hz tone (0.1 s on, 0.1 s off).
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_PIP = 33;
+
/** Maximum volume, for use with {@link #ToneGenerator(int,int)} */
public static final int MAX_VOLUME = AudioSystem.MAX_VOLUME;
@@ -258,6 +303,11 @@ public class ToneGenerator
* <li>{@link #TONE_PROP_NACK}
* <li>{@link #TONE_PROP_PROMPT}
* <li>{@link #TONE_PROP_BEEP2}
+ * <li>{@link #TONE_SUP_INTERCEPT}
+ * <li>{@link #TONE_SUP_INTERCEPT_ABBREV}
+ * <li>{@link #TONE_SUP_CONGESTION_ABBREV}
+ * <li>{@link #TONE_SUP_CONFIRM}
+ * <li>{@link #TONE_SUP_PIP}
* </ul>
* @see #ToneGenerator(int, int)
*/