author | Jean-Baptiste Queru <jbq@google.com> | 2009-11-12 18:45:53 -0800 |
---|---|---|
committer | Jean-Baptiste Queru <jbq@google.com> | 2009-11-13 13:53:39 -0800 |
commit | 9db3d07b9620b4269ab33f78604a36327e536ce1 (patch) | |
tree | 41e294f34b9695187af098cd42167489fb0c8fb0 /media/java | |
parent | 6c63ee4fc4acae4bbbbd2a49e0a68206221f0de0 (diff) | |
download | frameworks_base-9db3d07b9620b4269ab33f78604a36327e536ce1.zip, frameworks_base-9db3d07b9620b4269ab33f78604a36327e536ce1.tar.gz, frameworks_base-9db3d07b9620b4269ab33f78604a36327e536ce1.tar.bz2 |
eclair snapshot
Diffstat (limited to 'media/java')
21 files changed, 2672 insertions, 977 deletions
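The first file in the diff, AsyncPlayer.java, replaces a hand-rolled singly linked command list (mHead/mTail/mLock) with a LinkedList<Command> guarded by the queue itself, and stamps each command with a request time so playback or stop requests delayed by more than a second are logged. A minimal sketch of that timestamp-and-warn pattern, in plain Java so it runs off-device (the class name and System.currentTimeMillis() stand in for the diff's SystemClock.uptimeMillis() and are illustrative, not code from the commit):

```java
import java.util.LinkedList;

/**
 * Illustrative sketch, not code from the commit: a command records when it was
 * requested, and the consumer warns if it sat in the queue for over a second,
 * mirroring the requestTime/delay check this diff adds to AsyncPlayer.
 */
public class DelayedCommandSketch {

    static final class Command {
        final int code;                                      // e.g. PLAY = 1, STOP = 2
        final long requestTime = System.currentTimeMillis(); // the diff uses SystemClock.uptimeMillis()
        Command(int code) { this.code = code; }
    }

    public static void main(String[] args) throws InterruptedException {
        LinkedList<Command> cmdQueue = new LinkedList<>();

        synchronized (cmdQueue) {          // the diff synchronizes on mCmdQueue itself
            cmdQueue.add(new Command(1));  // enqueue a PLAY-style command
        }

        Thread.sleep(50);                  // pretend the worker thread was busy

        Command cmd;
        synchronized (cmdQueue) {
            cmd = cmdQueue.removeFirst();
        }
        long delay = System.currentTimeMillis() - cmd.requestTime;
        if (delay > 1000) {                // AsyncPlayer's warning threshold
            System.out.println("command " + cmd.code + " delayed by " + delay + " msecs");
        }
    }
}
```

AsyncPlayer itself keeps its one-worker-thread-per-burst model; only the queue representation and the latency logging change.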
diff --git a/media/java/android/media/AsyncPlayer.java b/media/java/android/media/AsyncPlayer.java index 35f0409..e1e09b9 100644 --- a/media/java/android/media/AsyncPlayer.java +++ b/media/java/android/media/AsyncPlayer.java @@ -19,10 +19,12 @@ package android.media; import android.content.Context; import android.net.Uri; import android.os.PowerManager; +import android.os.SystemClock; import android.util.Log; import java.io.IOException; import java.lang.IllegalStateException; +import java.util.LinkedList; /** * Plays a series of audio URIs, but does all the hard work on another thread @@ -31,14 +33,15 @@ import java.lang.IllegalStateException; public class AsyncPlayer { private static final int PLAY = 1; private static final int STOP = 2; + private static final boolean mDebug = false; private static final class Command { - Command next; int code; Context context; Uri uri; boolean looping; int stream; + long requestTime; public String toString() { return "{ code=" + code + " looping=" + looping + " stream=" + stream @@ -46,6 +49,36 @@ public class AsyncPlayer { } } + private LinkedList<Command> mCmdQueue = new LinkedList(); + + private void startSound(Command cmd) { + // Preparing can be slow, so if there is something else + // is playing, let it continue until we're done, so there + // is less of a glitch. + try { + if (mDebug) Log.d(mTag, "Starting playback"); + MediaPlayer player = new MediaPlayer(); + player.setAudioStreamType(cmd.stream); + player.setDataSource(cmd.context, cmd.uri); + player.setLooping(cmd.looping); + player.prepare(); + player.start(); + if (mPlayer != null) { + mPlayer.release(); + } + mPlayer = player; + long delay = SystemClock.uptimeMillis() - cmd.requestTime; + if (delay > 1000) { + Log.w(mTag, "Notification sound delayed by " + delay + "msecs"); + } + } + catch (IOException e) { + Log.w(mTag, "error loading sound for " + cmd.uri, e); + } catch (IllegalStateException e) { + Log.w(mTag, "IllegalStateException (content provider died?) " + cmd.uri, e); + } + } + private final class Thread extends java.lang.Thread { Thread() { super("AsyncPlayer-" + mTag); @@ -55,41 +88,23 @@ public class AsyncPlayer { while (true) { Command cmd = null; - synchronized (mLock) { - if (mHead != null) { - cmd = mHead; - mHead = cmd.next; - if (mTail == cmd) { - mTail = null; - } - } + synchronized (mCmdQueue) { + if (mDebug) Log.d(mTag, "RemoveFirst"); + cmd = mCmdQueue.removeFirst(); } switch (cmd.code) { case PLAY: - try { - // Preparing can be slow, so if there is something else - // is playing, let it continue until we're done, so there - // is less of a glitch. - MediaPlayer player = new MediaPlayer(); - player.setAudioStreamType(cmd.stream); - player.setDataSource(cmd.context, cmd.uri); - player.setLooping(cmd.looping); - player.prepare(); - player.start(); - if (mPlayer != null) { - mPlayer.release(); - } - mPlayer = player; - } - catch (IOException e) { - Log.w(mTag, "error loading sound for " + cmd.uri, e); - } catch (IllegalStateException e) { - Log.w(mTag, "IllegalStateException (content provider died?) 
" + cmd.uri, e); - } + if (mDebug) Log.d(mTag, "PLAY"); + startSound(cmd); break; case STOP: + if (mDebug) Log.d(mTag, "STOP"); if (mPlayer != null) { + long delay = SystemClock.uptimeMillis() - cmd.requestTime; + if (delay > 1000) { + Log.w(mTag, "Notification stop delayed by " + delay + "msecs"); + } mPlayer.stop(); mPlayer.release(); mPlayer = null; @@ -99,8 +114,8 @@ public class AsyncPlayer { break; } - synchronized (mLock) { - if (mHead == null) { + synchronized (mCmdQueue) { + if (mCmdQueue.size() == 0) { // nothing left to do, quit // doing this check after we're done prevents the case where they // added it during the operation from spawning two threads and @@ -115,11 +130,8 @@ public class AsyncPlayer { } private String mTag; - private Command mHead; - private Command mTail; private Thread mThread; private MediaPlayer mPlayer; - private Object mLock = new Object(); private PowerManager.WakeLock mWakeLock; // The current state according to the caller. Reality lags behind @@ -154,12 +166,13 @@ public class AsyncPlayer { */ public void play(Context context, Uri uri, boolean looping, int stream) { Command cmd = new Command(); + cmd.requestTime = SystemClock.uptimeMillis(); cmd.code = PLAY; cmd.context = context; cmd.uri = uri; cmd.looping = looping; cmd.stream = stream; - synchronized (mLock) { + synchronized (mCmdQueue) { enqueueLocked(cmd); mState = PLAY; } @@ -170,11 +183,12 @@ public class AsyncPlayer { * at this point. Calling this multiple times has no ill effects. */ public void stop() { - synchronized (mLock) { + synchronized (mCmdQueue) { // This check allows stop to be called multiple times without starting // a thread that ends up doing nothing. if (mState != STOP) { Command cmd = new Command(); + cmd.requestTime = SystemClock.uptimeMillis(); cmd.code = STOP; enqueueLocked(cmd); mState = STOP; @@ -183,12 +197,7 @@ public class AsyncPlayer { } private void enqueueLocked(Command cmd) { - if (mTail == null) { - mHead = cmd; - } else { - mTail.next = cmd; - } - mTail = cmd; + mCmdQueue.add(cmd); if (mThread == null) { acquireWakeLock(); mThread = new Thread(); diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java index 0732b61..b3aae72 100644 --- a/media/java/android/media/AudioFormat.java +++ b/media/java/android/media/AudioFormat.java @@ -37,15 +37,61 @@ public class AudioFormat { public static final int ENCODING_PCM_8BIT = 3; // accessed by native code /** Invalid audio channel configuration */ - public static final int CHANNEL_CONFIGURATION_INVALID = 0; + /** @deprecated use CHANNEL_INVALID instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0; /** Default audio channel configuration */ - public static final int CHANNEL_CONFIGURATION_DEFAULT = 1; + /** @deprecated use CHANNEL_OUT_DEFAULT or CHANNEL_IN_DEFAULT instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1; /** Mono audio configuration */ - public static final int CHANNEL_CONFIGURATION_MONO = 2; + /** @deprecated use CHANNEL_OUT_MONO or CHANNEL_IN_MONO instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2; /** Stereo (2 channel) audio configuration */ - public static final int CHANNEL_CONFIGURATION_STEREO = 3; + /** @deprecated use CHANNEL_OUT_STEREO or CHANNEL_IN_STEREO instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3; -} + /** Invalid audio channel mask */ + public static final int CHANNEL_INVALID = 0; + /** Default audio channel mask */ + public static 
final int CHANNEL_OUT_DEFAULT = 1; + // Channel mask definitions must be kept in sync with native values in include/media/AudioSystem.h + public static final int CHANNEL_OUT_FRONT_LEFT = 0x4; + public static final int CHANNEL_OUT_FRONT_RIGHT = 0x8; + public static final int CHANNEL_OUT_FRONT_CENTER = 0x10; + public static final int CHANNEL_OUT_LOW_FREQUENCY = 0x20; + public static final int CHANNEL_OUT_BACK_LEFT = 0x40; + public static final int CHANNEL_OUT_BACK_RIGHT = 0x80; + public static final int CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x100; + public static final int CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x200; + public static final int CHANNEL_OUT_BACK_CENTER = 0x400; + public static final int CHANNEL_OUT_MONO = CHANNEL_OUT_FRONT_LEFT; + public static final int CHANNEL_OUT_STEREO = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT); + public static final int CHANNEL_OUT_QUAD = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT); + public static final int CHANNEL_OUT_SURROUND = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_CENTER); + public static final int CHANNEL_OUT_5POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT); + public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT | + CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER); + public static final int CHANNEL_IN_DEFAULT = 1; + public static final int CHANNEL_IN_LEFT = 0x4; + public static final int CHANNEL_IN_RIGHT = 0x8; + public static final int CHANNEL_IN_FRONT = 0x10; + public static final int CHANNEL_IN_BACK = 0x20; + public static final int CHANNEL_IN_LEFT_PROCESSED = 0x40; + public static final int CHANNEL_IN_RIGHT_PROCESSED = 0x80; + public static final int CHANNEL_IN_FRONT_PROCESSED = 0x100; + public static final int CHANNEL_IN_BACK_PROCESSED = 0x200; + public static final int CHANNEL_IN_PRESSURE = 0x400; + public static final int CHANNEL_IN_X_AXIS = 0x800; + public static final int CHANNEL_IN_Y_AXIS = 0x1000; + public static final int CHANNEL_IN_Z_AXIS = 0x2000; + public static final int CHANNEL_IN_VOICE_UPLINK = 0x4000; + public static final int CHANNEL_IN_VOICE_DNLINK = 0x8000; + public static final int CHANNEL_IN_MONO = CHANNEL_IN_FRONT; + public static final int CHANNEL_IN_STEREO = (CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT); +} diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java index a65a417..bb16215a 100644 --- a/media/java/android/media/AudioManager.java +++ b/media/java/android/media/AudioManager.java @@ -140,33 +140,31 @@ public class AudioManager { public static final int STREAM_NOTIFICATION = AudioSystem.STREAM_NOTIFICATION; /** @hide The audio stream for phone calls when connected to bluetooth */ public static final int STREAM_BLUETOOTH_SCO = AudioSystem.STREAM_BLUETOOTH_SCO; + /** @hide The audio stream for enforced system sounds in certain countries (e.g camera in Japan) */ + public static final int STREAM_SYSTEM_ENFORCED = AudioSystem.STREAM_SYSTEM_ENFORCED; + /** The audio stream for DTMF Tones */ + public static final int STREAM_DTMF = AudioSystem.STREAM_DTMF; + /** @hide The audio stream for text to speech (TTS) */ + public static final int STREAM_TTS = AudioSystem.STREAM_TTS; /** Number 
of audio streams */ /** * @deprecated Use AudioSystem.getNumStreamTypes() instead */ - public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS; + @Deprecated public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS; - /** @hide Maximum volume index values for audio streams */ - public static final int[] MAX_STREAM_VOLUME = new int[] { - 6, // STREAM_VOICE_CALL - 8, // STREAM_SYSTEM - 8, // STREAM_RING - 16, // STREAM_MUSIC - 8, // STREAM_ALARM - 8, // STREAM_NOTIFICATION - 16, // STREAM_BLUETOOTH_SCO - }; - /** @hide Default volume index values for audio streams */ public static final int[] DEFAULT_STREAM_VOLUME = new int[] { 4, // STREAM_VOICE_CALL - 5, // STREAM_SYSTEM + 7, // STREAM_SYSTEM 5, // STREAM_RING 11, // STREAM_MUSIC 6, // STREAM_ALARM 5, // STREAM_NOTIFICATION - 7 // STREAM_BLUETOOTH_SCO + 7, // STREAM_BLUETOOTH_SCO + 7, // STREAM_SYSTEM_ENFORCED + 11, // STREAM_DTMF + 11 // STREAM_TTS }; /** @@ -347,6 +345,9 @@ public class AudioManager { /** * Adjusts the volume of a particular stream by one step in a direction. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param streamType The stream type to adjust. One of {@link #STREAM_VOICE_CALL}, * {@link #STREAM_SYSTEM}, {@link #STREAM_RING}, {@link #STREAM_MUSIC} or @@ -372,6 +373,9 @@ public class AudioManager { * active, it will have the highest priority regardless of if the in-call * screen is showing. Another example, if music is playing in the background * and a call is not active, the music stream will be adjusted. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param direction The direction to adjust the volume. One of * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or @@ -393,6 +397,9 @@ public class AudioManager { /** * Adjusts the volume of the most relevant stream, or the given fallback * stream. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param direction The direction to adjust the volume. One of * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or @@ -543,6 +550,9 @@ public class AudioManager { * <p> * For a better user experience, applications MUST unmute a muted stream * in onPause() and mute is again in onResume() if appropriate. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param streamType The stream to be muted/unmuted. * @param state The required mute state: true for mute ON, false for mute OFF @@ -610,6 +620,9 @@ public class AudioManager { /** * Sets the setting for when the vibrate type should vibrate. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param vibrateType The type of vibrate. One of * {@link #VIBRATE_TYPE_NOTIFICATION} or @@ -632,14 +645,20 @@ public class AudioManager { /** * Sets the speakerphone on or off. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. 
* * @param on set <var>true</var> to turn on speakerphone; * <var>false</var> to turn it off */ public void setSpeakerphoneOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_SPEAKER: 0, ROUTE_SPEAKER); + IAudioService service = getService(); + try { + service.setSpeakerphoneOn(on); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in setSpeakerphoneOn", e); + } } /** @@ -648,41 +667,55 @@ public class AudioManager { * @return true if speakerphone is on, false if it's off */ public boolean isSpeakerphoneOn() { - return (getRoutingP(MODE_IN_CALL) & ROUTE_SPEAKER) == 0 ? false : true; + IAudioService service = getService(); + try { + return service.isSpeakerphoneOn(); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in isSpeakerphoneOn", e); + return false; + } } /** - * Sets audio routing to the Bluetooth headset on or off. + * Request use of Bluetooth SCO headset for communications. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * - * @param on set <var>true</var> to route SCO (voice) audio to/from Bluetooth - * headset; <var>false</var> to route audio to/from phone earpiece + * @param on set <var>true</var> to use bluetooth SCO for communications; + * <var>false</var> to not use bluetooth SCO for communications */ public void setBluetoothScoOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_SCO: 0, ROUTE_BLUETOOTH_SCO); + IAudioService service = getService(); + try { + service.setBluetoothScoOn(on); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in setBluetoothScoOn", e); + } } /** - * Checks whether audio routing to the Bluetooth headset is on or off. + * Checks whether communications use Bluetooth SCO. * - * @return true if SCO audio is being routed to/from Bluetooth headset; + * @return true if SCO is used for communications; * false if otherwise */ public boolean isBluetoothScoOn() { - return (getRoutingP(MODE_IN_CALL) & ROUTE_BLUETOOTH_SCO) == 0 ? false : true; + IAudioService service = getService(); + try { + return service.isBluetoothScoOn(); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in isBluetoothScoOn", e); + return false; + } } /** - * Sets A2DP audio routing to the Bluetooth headset on or off. - * * @param on set <var>true</var> to route A2DP audio to/from Bluetooth * headset; <var>false</var> disable A2DP audio + * @deprecated Do not use. */ - public void setBluetoothA2dpOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_A2DP: 0, ROUTE_BLUETOOTH_A2DP); + @Deprecated public void setBluetoothA2dpOn(boolean on){ } /** @@ -692,7 +725,12 @@ public class AudioManager { * false if otherwise */ public boolean isBluetoothA2dpOn() { - return (getRoutingP(MODE_NORMAL) & ROUTE_BLUETOOTH_A2DP) == 0 ? 
false : true; + if (AudioSystem.getDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,"") + == AudioSystem.DEVICE_STATE_UNAVAILABLE) { + return false; + } else { + return true; + } } /** @@ -700,12 +738,9 @@ public class AudioManager { * * @param on set <var>true</var> to route audio to/from wired * headset; <var>false</var> disable wired headset audio - * @hide + * @deprecated Do not use. */ - public void setWiredHeadsetOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_HEADSET: 0, ROUTE_HEADSET); + @Deprecated public void setWiredHeadsetOn(boolean on){ } /** @@ -713,25 +748,27 @@ public class AudioManager { * * @return true if audio is being routed to/from wired headset; * false if otherwise - * @hide */ public boolean isWiredHeadsetOn() { - return (getRoutingP(MODE_NORMAL) & ROUTE_HEADSET) == 0 ? false : true; + if (AudioSystem.getDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET,"") + == AudioSystem.DEVICE_STATE_UNAVAILABLE) { + return false; + } else { + return true; + } } /** * Sets the microphone mute on or off. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param on set <var>true</var> to mute the microphone; * <var>false</var> to turn mute off */ public void setMicrophoneMute(boolean on){ - IAudioService service = getService(); - try { - service.setMicrophoneMute(on); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setMicrophoneMute", e); - } + AudioSystem.muteMicrophone(on); } /** @@ -740,17 +777,18 @@ public class AudioManager { * @return true if microphone is muted, false if it's not */ public boolean isMicrophoneMute() { - IAudioService service = getService(); - try { - return service.isMicrophoneMute(); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in isMicrophoneMute", e); - return false; - } + return AudioSystem.isMicrophoneMuted(); } /** * Sets the audio mode. + * <p> + * The audio mode encompasses audio routing AND the behavior of + * the telephony layer. Therefore this method should only be used by applications that + * replace the platform-wide management of audio settings or the main telephony application. + * In particular, the {@link #MODE_IN_CALL} mode should only be used by the telephony + * application when it places a phone call, as it will cause signals from the radio layer + * to feed the platform mixer. * * @param mode the requested audio mode (NORMAL, RINGTONE, or IN_CALL). * Informs the HAL about the current audio state so that @@ -809,32 +847,46 @@ public class AudioManager { /* Routing bits for setRouting/getRouting API */ /** * Routing audio output to earpiece + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE; + @Deprecated public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE; /** - * Routing audio output to spaker + * Routing audio output to speaker + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. 
*/ - public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER; + @Deprecated public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER; /** * @deprecated use {@link #ROUTE_BLUETOOTH_SCO} + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ @Deprecated public static final int ROUTE_BLUETOOTH = AudioSystem.ROUTE_BLUETOOTH_SCO; /** * Routing audio output to bluetooth SCO + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO; + @Deprecated public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO; /** * Routing audio output to headset + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET; + @Deprecated public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET; /** * Routing audio output to bluetooth A2DP + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP; + @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP; /** * Used for mask parameter of {@link #setRouting(int,int,int)}. + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL; + @Deprecated public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL; /** * Sets the audio routing for a specified mode @@ -846,16 +898,10 @@ public class AudioManager { * ROUTE_xxx types. Unset bits indicate the route should be left unchanged * * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), - * setBluetoothScoOn(), setBluetoothA2dpOn() and setWiredHeadsetOn() methods instead. + * setBluetoothScoOn() methods instead. */ - + @Deprecated public void setRouting(int mode, int routes, int mask) { - IAudioService service = getService(); - try { - service.setRouting(mode, routes, mask); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setRouting", e); - } } /** @@ -869,13 +915,7 @@ public class AudioManager { */ @Deprecated public int getRouting(int mode) { - IAudioService service = getService(); - try { - return service.getRouting(mode); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in getRouting", e); - return -1; - } + return -1; } /** @@ -884,13 +924,7 @@ public class AudioManager { * @return true if any music tracks are active. */ public boolean isMusicActive() { - IAudioService service = getService(); - try { - return service.isMusicActive(); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in isMusicActive", e); - return false; - } + return AudioSystem.isMusicActive(); } /* @@ -906,14 +940,32 @@ public class AudioManager { */ /** * @hide + * @deprecated Use {@link #setPrameters(String)} instead */ - public void setParameter(String key, String value) { - IAudioService service = getService(); - try { - service.setParameter(key, value); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setParameter", e); - } + @Deprecated public void setParameter(String key, String value) { + setParameters(key+"="+value); + } + + /** + * Sets a variable number of parameter values to audio hardware. 
+ * + * @param keyValuePairs list of parameters key value pairs in the form: + * key1=value1;key2=value2;... + * + */ + public void setParameters(String keyValuePairs) { + AudioSystem.setParameters(keyValuePairs); + } + + /** + * Sets a varaible number of parameter values to audio hardware. + * + * @param keys list of parameters + * @return list of parameters key value pairs in the form: + * key1=value1;key2=value2;... + */ + public String getParameters(String keys) { + return AudioSystem.getParameters(keys); } /* Sound effect identifiers */ @@ -1011,7 +1063,9 @@ public class AudioManager { * {@link #FX_KEYPRESS_SPACEBAR}, * {@link #FX_KEYPRESS_DELETE}, * {@link #FX_KEYPRESS_RETURN}, - * @param volume Sound effect volume + * @param volume Sound effect volume. + * The volume value is a raw scalar so UI controls should be scaled logarithmically. + * If a volume of -1 is specified, the AudioManager.STREAM_MUSIC stream volume minus 3dB will be used. * NOTE: This version is for applications that have their own * settings panel for enabling and controlling volume. */ @@ -1082,31 +1136,4 @@ public class AudioManager { * {@hide} */ private IBinder mICallBack = new Binder(); - - /** - * {@hide} - */ - private void setRoutingP(int mode, int routes, int mask) { - IAudioService service = getService(); - try { - service.setRouting(mode, routes, mask); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setRouting", e); - } - } - - - /** - * {@hide} - */ - private int getRoutingP(int mode) { - IAudioService service = getService(); - try { - return service.getRouting(mode); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in getRouting", e); - return -1; - } - } - } diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java index 4d1535f..7a47157 100644 --- a/media/java/android/media/AudioRecord.java +++ b/media/java/android/media/AudioRecord.java @@ -86,7 +86,7 @@ public class AudioRecord public static final int ERROR_INVALID_OPERATION = -3; private static final int AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT = -16; - private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELCOUNT = -17; + private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK = -17; private static final int AUDIORECORD_ERROR_SETUP_INVALIDFORMAT = -18; private static final int AUDIORECORD_ERROR_SETUP_INVALIDSOURCE = -19; private static final int AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED = -20; @@ -133,9 +133,13 @@ public class AudioRecord */ private int mChannelCount = 1; /** + * The audio channel mask + */ + private int mChannels = AudioFormat.CHANNEL_IN_MONO; + /** * The current audio channel configuration */ - private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + private int mChannelConfiguration = AudioFormat.CHANNEL_IN_MONO; /** * The encoding of the audio samples. * @see AudioFormat#ENCODING_PCM_8BIT @@ -193,8 +197,8 @@ public class AudioRecord * @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but * not limited to) 44100, 22050 and 11025. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_IN_MONO} and + * {@link AudioFormat#CHANNEL_IN_STEREO} * @param audioFormat the format in which the audio data is represented. 
* See {@link AudioFormat#ENCODING_PCM_16BIT} and * {@link AudioFormat#ENCODING_PCM_8BIT} @@ -224,7 +228,7 @@ public class AudioRecord //TODO: update native initialization when information about hardware init failure // due to capture device already open is available. int initResult = native_setup( new WeakReference<AudioRecord>(this), - mRecordSource, mSampleRate, mChannelCount, mAudioFormat, mNativeBufferSizeInBytes); + mRecordSource, mSampleRate, mChannels, mAudioFormat, mNativeBufferSizeInBytes); if (initResult != SUCCESS) { loge("Error code "+initResult+" when initializing native AudioRecord object."); return; // with mState == STATE_UNINITIALIZED @@ -239,6 +243,7 @@ public class AudioRecord // postconditions: // mRecordSource is valid // mChannelCount is valid + // mChannels is valid // mAudioFormat is valid // mSampleRate is valid private void audioParamCheck(int audioSource, int sampleRateInHz, @@ -264,20 +269,25 @@ public class AudioRecord //-------------- // channel config + mChannelConfiguration = channelConfig; + switch (channelConfig) { - case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT: + case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT + case AudioFormat.CHANNEL_IN_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: mChannelCount = 1; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + mChannels = AudioFormat.CHANNEL_IN_MONO; break; + case AudioFormat.CHANNEL_IN_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: mChannelCount = 2; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + mChannels = AudioFormat.CHANNEL_IN_STEREO; break; default: mChannelCount = 0; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_INVALID; - throw (new IllegalArgumentException("Unsupported channel configuration.")); + mChannels = AudioFormat.CHANNEL_INVALID; + mChannelConfiguration = AudioFormat.CHANNEL_INVALID; + throw (new IllegalArgumentException("Unsupported channel configuration.")); } //-------------- @@ -368,8 +378,8 @@ public class AudioRecord /** * Returns the configured channel configuration. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} - * and {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}. + * See {@link AudioFormat#CHANNEL_IN_MONO} + * and {@link AudioFormat#CHANNEL_IN_STEREO}. */ public int getChannelConfiguration() { return mChannelConfiguration; @@ -425,8 +435,8 @@ public class AudioRecord * will be polled for new data. * @param sampleRateInHz the sample rate expressed in Hertz. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_IN_MONO} and + * {@link AudioFormat#CHANNEL_IN_STEREO} * @param audioFormat the format in which the audio data is represented. * See {@link AudioFormat#ENCODING_PCM_16BIT}. 
* @return {@link #ERROR_BAD_VALUE} if the recording parameters are not supported by the @@ -438,14 +448,16 @@ public class AudioRecord static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { int channelCount = 0; switch(channelConfig) { - case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT: + case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT + case AudioFormat.CHANNEL_IN_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: channelCount = 1; break; + case AudioFormat.CHANNEL_IN_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: channelCount = 2; break; - case AudioFormat.CHANNEL_CONFIGURATION_INVALID: + case AudioFormat.CHANNEL_INVALID: default: loge("getMinBufferSize(): Invalid channel configuration."); return AudioRecord.ERROR_BAD_VALUE; diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java index 58c04f3..58a0bba 100644 --- a/media/java/android/media/AudioService.java +++ b/media/java/android/media/AudioService.java @@ -17,9 +17,16 @@ package android.media; import android.app.ActivityManagerNative; +import android.content.BroadcastReceiver; import android.content.ContentResolver; import android.content.Context; import android.content.Intent; +import android.content.IntentFilter; +import android.bluetooth.BluetoothA2dp; +import android.bluetooth.BluetoothClass; +import android.bluetooth.BluetoothDevice; +import android.bluetooth.BluetoothHeadset; + import android.content.pm.PackageManager; import android.database.ContentObserver; import android.media.MediaPlayer.OnCompletionListener; @@ -36,11 +43,16 @@ import android.provider.Settings; import android.provider.Settings.System; import android.util.Log; import android.view.VolumePanel; +import android.os.SystemProperties; import com.android.internal.telephony.ITelephony; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; /** @@ -94,16 +106,10 @@ public class AudioService extends IAudioService.Stub { /** @see VolumeStreamState */ private VolumeStreamState[] mStreamStates; private SettingsObserver mSettingsObserver; - - private boolean mMicMute; + private int mMode; - private int[] mRoutes = new int[AudioSystem.NUM_MODES]; private Object mSettingsLock = new Object(); private boolean mMediaServerOk; - private boolean mSpeakerIsOn; - private boolean mBluetoothScoIsConnected; - private boolean mHeadsetIsConnected; - private boolean mBluetoothA2dpIsConnected; private SoundPool mSoundPool; private Object mSoundEffectsLock = new Object(); @@ -135,6 +141,36 @@ public class AudioService extends IAudioService.Stub { {4, -1} // FX_FOCUS_RETURN }; + /** @hide Maximum volume index values for audio streams */ + private int[] MAX_STREAM_VOLUME = new int[] { + 5, // STREAM_VOICE_CALL + 7, // STREAM_SYSTEM + 7, // STREAM_RING + 15, // STREAM_MUSIC + 7, // STREAM_ALARM + 7, // STREAM_NOTIFICATION + 15, // STREAM_BLUETOOTH_SCO + 7, // STREAM_SYSTEM_ENFORCED + 15, // STREAM_DTMF + 15 // STREAM_TTS + }; + /* STREAM_VOLUME_ALIAS[] indicates for each stream if it uses the volume settings + * of another stream: This avoids multiplying the volume settings for hidden + * stream types that follow other stream behavior for volume settings + * NOTE: do not create loops in aliases! 
*/ + private int[] STREAM_VOLUME_ALIAS = new int[] { + AudioSystem.STREAM_VOICE_CALL, // STREAM_VOICE_CALL + AudioSystem.STREAM_SYSTEM, // STREAM_SYSTEM + AudioSystem.STREAM_RING, // STREAM_RING + AudioSystem.STREAM_MUSIC, // STREAM_MUSIC + AudioSystem.STREAM_ALARM, // STREAM_ALARM + AudioSystem.STREAM_NOTIFICATION, // STREAM_NOTIFICATION + AudioSystem.STREAM_VOICE_CALL, // STREAM_BLUETOOTH_SCO + AudioSystem.STREAM_SYSTEM, // STREAM_SYSTEM_ENFORCED + AudioSystem.STREAM_VOICE_CALL, // STREAM_DTMF + AudioSystem.STREAM_MUSIC // STREAM_TTS + }; + private AudioSystem.ErrorCallback mAudioSystemCallback = new AudioSystem.ErrorCallback() { public void onError(int error) { switch (error) { @@ -142,12 +178,14 @@ public class AudioService extends IAudioService.Stub { if (mMediaServerOk) { sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SHARED_MSG, SENDMSG_NOOP, 0, 0, null, 1500); + mMediaServerOk = false; } break; case AudioSystem.AUDIO_STATUS_OK: if (!mMediaServerOk) { sendMsg(mAudioHandler, MSG_MEDIA_SERVER_STARTED, SHARED_MSG, SENDMSG_NOOP, 0, 0, null, 0); + mMediaServerOk = true; } break; default: @@ -178,6 +216,27 @@ public class AudioService extends IAudioService.Stub { */ private int mVibrateSetting; + /** @see System#NOTIFICATIONS_USE_RING_VOLUME */ + private int mNotificationsUseRingVolume; + + // Broadcast receiver for device connections intent broadcasts + private final BroadcastReceiver mReceiver = new AudioServiceBroadcastReceiver(); + + //TODO: use common definitions with HeadsetObserver + private static final int BIT_HEADSET = (1 << 0); + private static final int BIT_HEADSET_NO_MIC = (1 << 1); + private static final int BIT_TTY = (1 << 2); + private static final int BIT_FM_HEADSET = (1 << 3); + private static final int BIT_FM_SPEAKER = (1 << 4); + + private int mHeadsetState; + + // Devices currently connected + private HashMap <Integer, String> mConnectedDevices = new HashMap <Integer, String>(); + + // Forced device usage for communications + private int mForcedUseForComm; + /////////////////////////////////////////////////////////////////////////// // Construction /////////////////////////////////////////////////////////////////////////// @@ -186,20 +245,31 @@ public class AudioService extends IAudioService.Stub { public AudioService(Context context) { mContext = context; mContentResolver = context.getContentResolver(); + + // Intialized volume + MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = SystemProperties.getInt( + "ro.config.vc_call_vol_steps", + MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]); + mVolumePanel = new VolumePanel(context, this); mSettingsObserver = new SettingsObserver(); - + mMode = AudioSystem.MODE_NORMAL; + mHeadsetState = 0; + mForcedUseForComm = AudioSystem.FORCE_NONE; createAudioSystemThread(); - createStreamStates(); readPersistedSettings(); - readAudioSettings(); + createStreamStates(); mMediaServerOk = true; AudioSystem.setErrorCallback(mAudioSystemCallback); loadSoundEffects(); - mSpeakerIsOn = false; - mBluetoothScoIsConnected = false; - mHeadsetIsConnected = false; - mBluetoothA2dpIsConnected = false; + + // Register for device connection intent broadcasts. 
+ IntentFilter intentFilter = + new IntentFilter(Intent.ACTION_HEADSET_PLUG); + intentFilter.addAction(BluetoothA2dp.ACTION_SINK_STATE_CHANGED); + intentFilter.addAction(BluetoothHeadset.ACTION_STATE_CHANGED); + context.registerReceiver(mReceiver, intentFilter); + } private void createAudioSystemThread() { @@ -223,63 +293,23 @@ public class AudioService extends IAudioService.Stub { } private void createStreamStates() { - final int[] volumeLevelsPhone = - createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_VOICE_CALL]); - final int[] volumeLevelsCoarse = - createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_SYSTEM]); - final int[] volumeLevelsFine = - createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_MUSIC]); - final int[] volumeLevelsBtPhone = - createVolumeLevels(0, - AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_BLUETOOTH_SCO]); - int numStreamTypes = AudioSystem.getNumStreamTypes(); VolumeStreamState[] streams = mStreamStates = new VolumeStreamState[numStreamTypes]; for (int i = 0; i < numStreamTypes; i++) { - final int[] levels; - - switch (i) { - - case AudioSystem.STREAM_MUSIC: - levels = volumeLevelsFine; - break; - - case AudioSystem.STREAM_VOICE_CALL: - levels = volumeLevelsPhone; - break; - - case AudioSystem.STREAM_BLUETOOTH_SCO: - levels = volumeLevelsBtPhone; - break; - - default: - levels = volumeLevelsCoarse; - break; - } - - if (i == AudioSystem.STREAM_BLUETOOTH_SCO) { - streams[i] = new VolumeStreamState(AudioManager.DEFAULT_STREAM_VOLUME[i], i,levels); - } else { - streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[i], i, levels); - } - } - } - - private static int[] createVolumeLevels(int offset, int numlevels) { - double curve = 1.0f; // 1.4f - int [] volumes = new int[numlevels + offset]; - for (int i = 0; i < offset; i++) { - volumes[i] = 0; + streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[STREAM_VOLUME_ALIAS[i]], i); } - double val = 0; - double max = Math.pow(numlevels - 1, curve); - for (int i = 0; i < numlevels; i++) { - val = Math.pow(i, curve) / max; - volumes[offset + i] = (int) (val * 100.0f); + // Correct stream index values for streams with aliases + for (int i = 0; i < numStreamTypes; i++) { + if (STREAM_VOLUME_ALIAS[i] != i) { + int index = rescaleIndex(streams[i].mIndex, STREAM_VOLUME_ALIAS[i], i); + streams[i].mIndex = streams[i].getValidIndex(index); + setStreamVolumeIndex(i, index); + index = rescaleIndex(streams[i].mLastAudibleIndex, STREAM_VOLUME_ALIAS[i], i); + streams[i].mLastAudibleIndex = streams[i].getValidIndex(index); + } } - return volumes; } private void readPersistedSettings() { @@ -291,12 +321,19 @@ public class AudioService extends IAudioService.Stub { mRingerModeAffectedStreams = Settings.System.getInt(cr, Settings.System.MODE_RINGER_STREAMS_AFFECTED, - ((1 << AudioManager.STREAM_RING)|(1 << AudioManager.STREAM_NOTIFICATION)|(1 << AudioManager.STREAM_SYSTEM))); + ((1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_NOTIFICATION)| + (1 << AudioSystem.STREAM_SYSTEM)|(1 << AudioSystem.STREAM_SYSTEM_ENFORCED))); mMuteAffectedStreams = System.getInt(cr, System.MUTE_STREAMS_AFFECTED, ((1 << AudioSystem.STREAM_MUSIC)|(1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_SYSTEM))); + mNotificationsUseRingVolume = System.getInt(cr, + Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1); + + if (mNotificationsUseRingVolume == 1) { + STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING; + } // Each stream will read its own persisted 
settings // Broadcast the sticky intent @@ -307,25 +344,13 @@ public class AudioService extends IAudioService.Stub { broadcastVibrateSetting(AudioManager.VIBRATE_TYPE_NOTIFICATION); } - private void readAudioSettings() { - synchronized (mSettingsLock) { - mMicMute = AudioSystem.isMicrophoneMuted(); - mMode = AudioSystem.getMode(); - for (int mode = 0; mode < AudioSystem.NUM_MODES; mode++) { - mRoutes[mode] = AudioSystem.getRouting(mode); - } - } + private void setStreamVolumeIndex(int stream, int index) { + AudioSystem.setStreamVolumeIndex(stream, (index + 5)/10); } - private void applyAudioSettings() { - synchronized (mSettingsLock) { - AudioSystem.muteMicrophone(mMicMute); - AudioSystem.setMode(mMode); - for (int mode = 0; mode < AudioSystem.NUM_MODES; mode++) { - AudioSystem.setRouting(mode, mRoutes[mode], AudioSystem.ROUTE_ALL); - } - } - } + private int rescaleIndex(int index, int srcStream, int dstStream) { + return (index * mStreamStates[dstStream].getMaxIndex() + mStreamStates[srcStream].getMaxIndex() / 2) / mStreamStates[srcStream].getMaxIndex(); + } /////////////////////////////////////////////////////////////////////////// // IPC methods @@ -354,44 +379,26 @@ public class AudioService extends IAudioService.Stub { ensureValidDirection(direction); ensureValidStreamType(streamType); - boolean notificationsUseRingVolume = Settings.System.getInt(mContentResolver, - Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1) == 1; - if (notificationsUseRingVolume && streamType == AudioManager.STREAM_NOTIFICATION) { - // Redirect the volume change to the ring stream - streamType = AudioManager.STREAM_RING; - } - VolumeStreamState streamState = mStreamStates[streamType]; + VolumeStreamState streamState = mStreamStates[STREAM_VOLUME_ALIAS[streamType]]; final int oldIndex = streamState.mIndex; boolean adjustVolume = true; // If either the client forces allowing ringer modes for this adjustment, // or the stream type is one that is affected by ringer modes if ((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0 - || streamType == AudioManager.STREAM_RING) { + || streamType == AudioSystem.STREAM_RING) { // Check if the ringer mode changes with this volume adjustment. If // it does, it will handle adjusting the volume, so we won't below adjustVolume = checkForRingerModeChange(oldIndex, direction); } if (adjustVolume && streamState.adjustIndex(direction)) { - - boolean alsoUpdateNotificationVolume = notificationsUseRingVolume && - streamType == AudioManager.STREAM_RING; - if (alsoUpdateNotificationVolume) { - mStreamStates[AudioManager.STREAM_NOTIFICATION].adjustIndex(direction); - } - // Post message to set system volume (it in turn will post a message // to persist). Do not change volume if stream is muted. 
if (streamState.muteCount() == 0) { - sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, streamType, SENDMSG_NOOP, 0, 0, + sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, STREAM_VOLUME_ALIAS[streamType], SENDMSG_NOOP, 0, 0, streamState, 0); - - if (alsoUpdateNotificationVolume) { - sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, AudioManager.STREAM_NOTIFICATION, - SENDMSG_NOOP, 0, 0, mStreamStates[AudioManager.STREAM_NOTIFICATION], 0); - } } } @@ -404,9 +411,8 @@ public class AudioService extends IAudioService.Stub { /** @see AudioManager#setStreamVolume(int, int, int) */ public void setStreamVolume(int streamType, int index, int flags) { ensureValidStreamType(streamType); - syncRingerAndNotificationStreamVolume(streamType, index, false); - - setStreamVolumeInt(streamType, index, false, true); + index = rescaleIndex(index * 10, streamType, STREAM_VOLUME_ALIAS[streamType]); + setStreamVolumeInt(STREAM_VOLUME_ALIAS[streamType], index, false, true); // UI, etc. mVolumePanel.postVolumeChanged(streamType, flags); @@ -420,37 +426,12 @@ public class AudioService extends IAudioService.Stub { intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_VALUE, getStreamVolume(streamType)); // Currently, sending the intent only when the stream is BLUETOOTH_SCO - if (streamType == AudioManager.STREAM_BLUETOOTH_SCO) { + if (streamType == AudioSystem.STREAM_BLUETOOTH_SCO) { mContext.sendBroadcast(intent); } } /** - * Sync the STREAM_RING and STREAM_NOTIFICATION volumes if mandated by the - * value in Settings. - * - * @param streamType Type of the stream - * @param index Volume index for the stream - * @param force If true, set the volume even if the current and desired - * volume as same - */ - private void syncRingerAndNotificationStreamVolume(int streamType, int index, boolean force) { - boolean notificationsUseRingVolume = Settings.System.getInt(mContentResolver, - Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1) == 1; - if (notificationsUseRingVolume) { - if (streamType == AudioManager.STREAM_NOTIFICATION) { - // Redirect the volume change to the ring stream - streamType = AudioManager.STREAM_RING; - } - if (streamType == AudioManager.STREAM_RING) { - // One-off to sync notification volume to ringer volume - setStreamVolumeInt(AudioManager.STREAM_NOTIFICATION, index, force, true); - } - } - } - - - /** * Sets the stream state's index, and posts a message to set system volume. * This will not call out to the UI. Assumes a valid stream type. 
* @@ -491,13 +472,13 @@ public class AudioService extends IAudioService.Stub { /** @see AudioManager#getStreamVolume(int) */ public int getStreamVolume(int streamType) { ensureValidStreamType(streamType); - return mStreamStates[streamType].mIndex; + return (mStreamStates[streamType].mIndex + 5) / 10; } /** @see AudioManager#getStreamMaxVolume(int) */ public int getStreamMaxVolume(int streamType) { ensureValidStreamType(streamType); - return mStreamStates[streamType].getMaxIndex(); + return (mStreamStates[streamType].getMaxIndex() + 5) / 10; } /** @see AudioManager#getRingerMode() */ @@ -507,11 +488,12 @@ public class AudioService extends IAudioService.Stub { /** @see AudioManager#setRingerMode(int) */ public void setRingerMode(int ringerMode) { - if (ringerMode != mRingerMode) { - setRingerModeInt(ringerMode, true); - - // Send sticky broadcast - broadcastRingerMode(); + synchronized (mSettingsLock) { + if (ringerMode != mRingerMode) { + setRingerModeInt(ringerMode, true); + // Send sticky broadcast + broadcastRingerMode(); + } } } @@ -541,7 +523,7 @@ public class AudioService extends IAudioService.Stub { } } } - + // Post a persist ringer mode msg if (persist) { sendMsg(mAudioHandler, MSG_PERSIST_RINGER_MODE, SHARED_MSG, @@ -606,39 +588,28 @@ public class AudioService extends IAudioService.Stub { return existingValue; } - /** @see AudioManager#setMicrophoneMute(boolean) */ - public void setMicrophoneMute(boolean on) { - if (!checkAudioSettingsPermission("setMicrophoneMute()")) { - return; - } - synchronized (mSettingsLock) { - if (on != mMicMute) { - AudioSystem.muteMicrophone(on); - mMicMute = on; - } - } - } - - /** @see AudioManager#isMicrophoneMute() */ - public boolean isMicrophoneMute() { - return mMicMute; - } - /** @see AudioManager#setMode(int) */ public void setMode(int mode) { if (!checkAudioSettingsPermission("setMode()")) { return; } + + if (mode < AudioSystem.MODE_CURRENT || mode > AudioSystem.MODE_IN_CALL) { + return; + } + synchronized (mSettingsLock) { + if (mode == AudioSystem.MODE_CURRENT) { + mode = mMode; + } if (mode != mMode) { - if (AudioSystem.setMode(mode) == AudioSystem.AUDIO_STATUS_OK) { + if (AudioSystem.setPhoneState(mode) == AudioSystem.AUDIO_STATUS_OK) { mMode = mode; } } int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE); - int index = mStreamStates[streamType].mIndex; - syncRingerAndNotificationStreamVolume(streamType, index, true); - setStreamVolumeInt(streamType, index, true, true); + int index = mStreamStates[STREAM_VOLUME_ALIAS[streamType]].mIndex; + setStreamVolumeInt(STREAM_VOLUME_ALIAS[streamType], index, true, true); } } @@ -647,195 +618,15 @@ public class AudioService extends IAudioService.Stub { return mMode; } - /** @see AudioManager#setRouting(int, int, int) */ - public void setRouting(int mode, int routes, int mask) { - int incallMask = 0; - int ringtoneMask = 0; - int normalMask = 0; - - if (!checkAudioSettingsPermission("setRouting()")) { - return; - } - synchronized (mSettingsLock) { - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // mode AudioSystem.MODE_INVALID is used only by the following AudioManager methods: - // setWiredHeadsetOn(), setBluetoothA2dpOn(), setBluetoothScoOn() and setSpeakerphoneOn(). - // If applications are using AudioManager.setRouting() that is now deprecated, the routing - // command will be ignored. 
- if (mode == AudioSystem.MODE_INVALID) { - switch (mask) { - case AudioSystem.ROUTE_SPEAKER: - // handle setSpeakerphoneOn() - if (routes != 0 && !mSpeakerIsOn) { - mSpeakerIsOn = true; - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER; - incallMask = AudioSystem.ROUTE_ALL; - } else if (routes == 0 && mSpeakerIsOn) { - mSpeakerIsOn = false; - if (mBluetoothScoIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO; - } else if (mHeadsetIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET; - } else { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE; - } - incallMask = AudioSystem.ROUTE_ALL; - } - break; - - case AudioSystem.ROUTE_BLUETOOTH_SCO: - // handle setBluetoothScoOn() - if (routes != 0 && !mBluetoothScoIsConnected) { - mBluetoothScoIsConnected = true; - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO; - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_BLUETOOTH_SCO; - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_BLUETOOTH_SCO; - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than SCO headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } else if (routes == 0 && mBluetoothScoIsConnected) { - mBluetoothScoIsConnected = false; - if (mHeadsetIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET; - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER); - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_HEADSET; - } else { - if (mSpeakerIsOn) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER; - } else { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE; - } - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - } - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than SCO headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - break; - - case AudioSystem.ROUTE_HEADSET: - // handle setWiredHeadsetOn() - if (routes != 0 && !mHeadsetIsConnected) { - mHeadsetIsConnected = true; - // do not act upon headset connection if bluetooth SCO is connected to match phone app behavior - if (!mBluetoothScoIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET; - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER); - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_HEADSET; - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than wired headset, so headset 
connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - } else if (routes == 0 && mHeadsetIsConnected) { - mHeadsetIsConnected = false; - // do not act upon headset disconnection if bluetooth SCO is connected to match phone app behavior - if (!mBluetoothScoIsConnected) { - if (mSpeakerIsOn) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER; - } else { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE; - } - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than wired headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - } - break; - - case AudioSystem.ROUTE_BLUETOOTH_A2DP: - // handle setBluetoothA2dpOn() - if (routes != 0 && !mBluetoothA2dpIsConnected) { - mBluetoothA2dpIsConnected = true; - mRoutes[AudioSystem.MODE_RINGTONE] |= AudioSystem.ROUTE_BLUETOOTH_A2DP; - mRoutes[AudioSystem.MODE_NORMAL] |= AudioSystem.ROUTE_BLUETOOTH_A2DP; - // the audio flinger chooses A2DP as a higher priority, - // so there is no need to disable other routes. - ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - } else if (routes == 0 && mBluetoothA2dpIsConnected) { - mBluetoothA2dpIsConnected = false; - mRoutes[AudioSystem.MODE_RINGTONE] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - mRoutes[AudioSystem.MODE_NORMAL] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - // the audio flinger chooses A2DP as a higher priority, - // so there is no need to disable other routes. 
- ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - break; - } - - // incallMask is != 0 means we must apply ne routing to MODE_IN_CALL mode - if (incallMask != 0) { - AudioSystem.setRouting(AudioSystem.MODE_IN_CALL, - mRoutes[AudioSystem.MODE_IN_CALL], - incallMask); - } - // ringtoneMask is != 0 means we must apply ne routing to MODE_RINGTONE mode - if (ringtoneMask != 0) { - AudioSystem.setRouting(AudioSystem.MODE_RINGTONE, - mRoutes[AudioSystem.MODE_RINGTONE], - ringtoneMask); - } - // normalMask is != 0 means we must apply ne routing to MODE_NORMAL mode - if (normalMask != 0) { - AudioSystem.setRouting(AudioSystem.MODE_NORMAL, - mRoutes[AudioSystem.MODE_NORMAL], - normalMask); - } - - int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE); - int index = mStreamStates[streamType].mIndex; - syncRingerAndNotificationStreamVolume(streamType, index, true); - setStreamVolumeInt(streamType, index, true, true); - } - } - } - - /** @see AudioManager#getRouting(int) */ - public int getRouting(int mode) { - return mRoutes[mode]; - } - - /** @see AudioManager#isMusicActive() */ - public boolean isMusicActive() { - return AudioSystem.isMusicActive(); - } - - /** @see AudioManager#setParameter(String, String) */ - public void setParameter(String key, String value) { - AudioSystem.setParameter(key, value); - } - /** @see AudioManager#playSoundEffect(int) */ public void playSoundEffect(int effectType) { sendMsg(mAudioHandler, MSG_PLAY_SOUND_EFFECT, SHARED_MSG, SENDMSG_NOOP, - effectType, SOUND_EFFECT_VOLUME, null, 0); + effectType, -1, null, 0); } /** @see AudioManager#playSoundEffect(int, float) */ public void playSoundEffectVolume(int effectType, float volume) { + loadSoundEffects(); sendMsg(mAudioHandler, MSG_PLAY_SOUND_EFFECT, SHARED_MSG, SENDMSG_NOOP, effectType, (int) (volume * 1000), null, 0); } @@ -846,6 +637,9 @@ public class AudioService extends IAudioService.Stub { */ public boolean loadSoundEffects() { synchronized (mSoundEffectsLock) { + if (mSoundPool != null) { + return true; + } mSoundPool = new SoundPool(NUM_SOUNDPOOL_CHANNELS, AudioSystem.STREAM_SYSTEM, 0); if (mSoundPool == null) { return false; @@ -926,18 +720,29 @@ public class AudioService extends IAudioService.Stub { for (int streamType = 0; streamType < numStreamTypes; streamType++) { VolumeStreamState streamState = mStreamStates[streamType]; - // there is no volume setting for STREAM_BLUETOOTH_SCO - if (streamType != AudioSystem.STREAM_BLUETOOTH_SCO) { - String settingName = System.VOLUME_SETTINGS[streamType]; - String lastAudibleSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; - - streamState.mIndex = streamState.getValidIndex(Settings.System.getInt(mContentResolver, - settingName, - AudioManager.DEFAULT_STREAM_VOLUME[streamType])); - streamState.mLastAudibleIndex = streamState.getValidIndex(Settings.System.getInt(mContentResolver, - lastAudibleSettingName, - streamState.mIndex > 0 ? 
streamState.mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType])); + String settingName = System.VOLUME_SETTINGS[STREAM_VOLUME_ALIAS[streamType]]; + String lastAudibleSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; + int index = Settings.System.getInt(mContentResolver, + settingName, + AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + if (STREAM_VOLUME_ALIAS[streamType] != streamType) { + index = rescaleIndex(index * 10, STREAM_VOLUME_ALIAS[streamType], streamType); + } else { + index *= 10; } + streamState.mIndex = streamState.getValidIndex(index); + + index = (index + 5) / 10; + index = Settings.System.getInt(mContentResolver, + lastAudibleSettingName, + (index > 0) ? index : AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + if (STREAM_VOLUME_ALIAS[streamType] != streamType) { + index = rescaleIndex(index * 10, STREAM_VOLUME_ALIAS[streamType], streamType); + } else { + index *= 10; + } + streamState.mLastAudibleIndex = streamState.getValidIndex(index); + // unmute stream that whas muted but is not affect by mute anymore if (streamState.muteCount() != 0 && !isStreamAffectedByMute(streamType)) { int size = streamState.mDeathHandlers.size(); @@ -948,7 +753,7 @@ public class AudioService extends IAudioService.Stub { } // apply stream volume if (streamState.muteCount() == 0) { - AudioSystem.setVolume(streamType, streamState.mVolumes[streamState.mIndex]); + setStreamVolumeIndex(streamType, streamState.mIndex); } } @@ -956,6 +761,54 @@ public class AudioService extends IAudioService.Stub { setRingerModeInt(getRingerMode(), false); } + /** @see AudioManager#setSpeakerphoneOn() */ + public void setSpeakerphoneOn(boolean on){ + if (!checkAudioSettingsPermission("setSpeakerphoneOn()")) { + return; + } + if (on) { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_SPEAKER); + mForcedUseForComm = AudioSystem.FORCE_SPEAKER; + } else { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_NONE); + mForcedUseForComm = AudioSystem.FORCE_NONE; + } + } + + /** @see AudioManager#isSpeakerphoneOn() */ + public boolean isSpeakerphoneOn() { + if (mForcedUseForComm == AudioSystem.FORCE_SPEAKER) { + return true; + } else { + return false; + } + } + + /** @see AudioManager#setBluetoothScoOn() */ + public void setBluetoothScoOn(boolean on){ + if (!checkAudioSettingsPermission("setBluetoothScoOn()")) { + return; + } + if (on) { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_BT_SCO); + AudioSystem.setForceUse(AudioSystem.FOR_RECORD, AudioSystem.FORCE_BT_SCO); + mForcedUseForComm = AudioSystem.FORCE_BT_SCO; + } else { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_NONE); + AudioSystem.setForceUse(AudioSystem.FOR_RECORD, AudioSystem.FORCE_NONE); + mForcedUseForComm = AudioSystem.FORCE_NONE; + } + } + + /** @see AudioManager#isBluetoothScoOn() */ + public boolean isBluetoothScoOn() { + if (mForcedUseForComm == AudioSystem.FORCE_BT_SCO) { + return true; + } else { + return false; + } + } + /////////////////////////////////////////////////////////////////////////// // Internal methods /////////////////////////////////////////////////////////////////////////// @@ -969,7 +822,7 @@ public class AudioService extends IAudioService.Stub { boolean adjustVolumeIndex = true; int newRingerMode = mRingerMode; - if (mRingerMode == AudioManager.RINGER_MODE_NORMAL && oldIndex == 1 + if (mRingerMode == AudioManager.RINGER_MODE_NORMAL && (oldIndex + 5) / 10 == 1 && direction == AudioManager.ADJUST_LOWER) { newRingerMode 
= AudioManager.RINGER_MODE_VIBRATE; } else if (mRingerMode == AudioManager.RINGER_MODE_VIBRATE) { @@ -1026,7 +879,7 @@ public class AudioService extends IAudioService.Stub { Log.w(TAG, "Couldn't connect to phone service", e); } - if ((getRouting(AudioSystem.MODE_IN_CALL) & AudioSystem.ROUTE_BLUETOOTH_SCO) != 0) { + if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION) == AudioSystem.FORCE_BT_SCO) { // Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO..."); return AudioSystem.STREAM_BLUETOOTH_SCO; } else if (isOffhook) { @@ -1106,51 +959,44 @@ public class AudioService extends IAudioService.Stub { /////////////////////////////////////////////////////////////////////////// public class VolumeStreamState { - private final String mVolumeIndexSettingName; - private final String mLastAudibleVolumeIndexSettingName; private final int mStreamType; - private final int[] mVolumes; + private String mVolumeIndexSettingName; + private String mLastAudibleVolumeIndexSettingName; + private int mIndexMax; private int mIndex; private int mLastAudibleIndex; private ArrayList<VolumeDeathHandler> mDeathHandlers; //handles mute/solo requests client death - private VolumeStreamState(String settingName, int streamType, int[] volumes) { + private VolumeStreamState(String settingName, int streamType) { - mVolumeIndexSettingName = settingName; - mLastAudibleVolumeIndexSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; + setVolumeIndexSettingName(settingName); mStreamType = streamType; - mVolumes = volumes; final ContentResolver cr = mContentResolver; - mIndex = getValidIndex(Settings.System.getInt(cr, mVolumeIndexSettingName, AudioManager.DEFAULT_STREAM_VOLUME[streamType])); - mLastAudibleIndex = getValidIndex(Settings.System.getInt(cr, - mLastAudibleVolumeIndexSettingName, mIndex > 0 ? mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType])); - - AudioSystem.setVolume(streamType, volumes[mIndex]); + mIndexMax = MAX_STREAM_VOLUME[streamType]; + mIndex = Settings.System.getInt(cr, + mVolumeIndexSettingName, + AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + mLastAudibleIndex = Settings.System.getInt(cr, + mLastAudibleVolumeIndexSettingName, + (mIndex > 0) ? mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + AudioSystem.initStreamVolume(streamType, 0, mIndexMax); + mIndexMax *= 10; + mIndex = getValidIndex(10 * mIndex); + mLastAudibleIndex = getValidIndex(10 * mLastAudibleIndex); + setStreamVolumeIndex(streamType, mIndex); mDeathHandlers = new ArrayList<VolumeDeathHandler>(); } - /** - * Constructor to be used when there is no setting associated with the VolumeStreamState. - * - * @param defaultVolume Default volume of the stream to use. - * @param streamType Type of the stream. - * @param volumes Volumes levels associated with this stream. 
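The replacement VolumeStreamState above keeps indices at ten times the persisted resolution (mIndexMax *= 10, getValidIndex(10 * mIndex)) so that aliased streams with different step counts can be rescaled without losing a step, and rounds back with (index + 5) / 10 when writing to Settings. A self-contained sketch of that round trip; rescale() is an assumption about what the rescaleIndex() helper does, and the 7-step and 15-step maxima are only example values:

// Sketch of the x10 internal volume scale used by the new VolumeStreamState.
// 'rescale' is an illustrative stand-in for AudioService.rescaleIndex().
public final class VolumeScaleExample {
    // Rescale an internal (x10) index from a stream with srcMax10 range
    // to a stream with dstMax10 range, rounding to the nearest step.
    static int rescale(int index, int srcMax10, int dstMax10) {
        return (index * dstMax10 + srcMax10 / 2) / srcMax10;
    }

    public static void main(String[] args) {
        int persisted = 5;              // value read from Settings.System (0..7 here)
        int internal = persisted * 10;  // settings -> internal x10 scale

        // Rescale onto a 15-step stream, as setIndex() does for volume aliases.
        int aliased = rescale(internal, 7 * 10, 15 * 10);

        // internal -> settings, rounding to the nearest whole step.
        System.out.println(internal + " -> " + aliased + " -> " + ((aliased + 5) / 10));
    }
}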
- */ - private VolumeStreamState(int defaultVolume, int streamType, int[] volumes) { - mVolumeIndexSettingName = null; - mLastAudibleVolumeIndexSettingName = null; - mIndex = mLastAudibleIndex = defaultVolume; - mStreamType = streamType; - mVolumes = volumes; - AudioSystem.setVolume(mStreamType, defaultVolume); - mDeathHandlers = new ArrayList<VolumeDeathHandler>(); + public void setVolumeIndexSettingName(String settingName) { + mVolumeIndexSettingName = settingName; + mLastAudibleVolumeIndexSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; } public boolean adjustIndex(int deltaIndex) { - return setIndex(mIndex + deltaIndex, true); + return setIndex(mIndex + deltaIndex * 10, true); } public boolean setIndex(int index, boolean lastAudible) { @@ -1161,6 +1007,13 @@ public class AudioService extends IAudioService.Stub { if (lastAudible) { mLastAudibleIndex = mIndex; } + // Apply change to all streams using this one as alias + int numStreamTypes = AudioSystem.getNumStreamTypes(); + for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) { + if (streamType != mStreamType && STREAM_VOLUME_ALIAS[streamType] == mStreamType) { + mStreamStates[streamType].setIndex(rescaleIndex(mIndex, mStreamType, streamType), lastAudible); + } + } return true; } else { return false; @@ -1168,7 +1021,7 @@ public class AudioService extends IAudioService.Stub { } public int getMaxIndex() { - return mVolumes.length - 1; + return mIndexMax; } public void mute(IBinder cb, boolean state) { @@ -1183,8 +1036,8 @@ public class AudioService extends IAudioService.Stub { private int getValidIndex(int index) { if (index < 0) { return 0; - } else if (index >= mVolumes.length) { - return mVolumes.length - 1; + } else if (index > mIndexMax) { + return mIndexMax; } return index; @@ -1318,8 +1171,16 @@ public class AudioService extends IAudioService.Stub { private void setSystemVolume(VolumeStreamState streamState) { // Adjust volume - AudioSystem - .setVolume(streamState.mStreamType, streamState.mVolumes[streamState.mIndex]); + setStreamVolumeIndex(streamState.mStreamType, streamState.mIndex); + + // Apply change to all streams using this one as alias + int numStreamTypes = AudioSystem.getNumStreamTypes(); + for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) { + if (streamType != streamState.mStreamType && + STREAM_VOLUME_ALIAS[streamType] == streamState.mStreamType) { + setStreamVolumeIndex(streamType, mStreamStates[streamType].mIndex); + } + } // Post a persist volume msg sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, streamState.mStreamType, @@ -1327,12 +1188,10 @@ public class AudioService extends IAudioService.Stub { } private void persistVolume(VolumeStreamState streamState) { - if (streamState.mStreamType != AudioManager.STREAM_BLUETOOTH_SCO) { - System.putInt(mContentResolver, streamState.mVolumeIndexSettingName, - streamState.mIndex); - System.putInt(mContentResolver, streamState.mLastAudibleVolumeIndexSettingName, - streamState.mLastAudibleIndex); - } + System.putInt(mContentResolver, streamState.mVolumeIndexSettingName, + (streamState.mIndex + 5)/ 10); + System.putInt(mContentResolver, streamState.mLastAudibleVolumeIndexSettingName, + (streamState.mLastAudibleIndex + 5) / 10); } private void persistRingerMode() { @@ -1348,10 +1207,20 @@ public class AudioService extends IAudioService.Stub { if (mSoundPool == null) { return; } + float volFloat; + // use STREAM_MUSIC volume attenuated by 3 dB if volume is not specified by caller + if (volume < 0) { + // Same linear to log 
conversion as in native AudioSystem::linearToLog() (AudioSystem.cpp) + float dBPerStep = (float)((0.5 * 100) / MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]); + int musicVolIndex = (mStreamStates[AudioSystem.STREAM_MUSIC].mIndex + 5) / 10; + float musicVoldB = dBPerStep * (musicVolIndex - MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]); + volFloat = (float)Math.pow(10, (musicVoldB - 3)/20); + } else { + volFloat = (float) volume / 1000.0f; + } if (SOUND_EFFECT_FILES_MAP[effectType][1] > 0) { - float v = (float) volume / 1000.0f; - mSoundPool.play(SOUND_EFFECT_FILES_MAP[effectType][1], v, v, 0, 0, 1.0f); + mSoundPool.play(SOUND_EFFECT_FILES_MAP[effectType][1], volFloat, volFloat, 0, 0, 1.0f); } else { MediaPlayer mediaPlayer = new MediaPlayer(); if (mediaPlayer != null) { @@ -1360,6 +1229,7 @@ public class AudioService extends IAudioService.Stub { mediaPlayer.setDataSource(filePath); mediaPlayer.setAudioStreamType(AudioSystem.STREAM_SYSTEM); mediaPlayer.prepare(); + mediaPlayer.setVolume(volFloat, volFloat); mediaPlayer.setOnCompletionListener(new OnCompletionListener() { public void onCompletion(MediaPlayer mp) { cleanupPlayer(mp); @@ -1418,29 +1288,50 @@ public class AudioService extends IAudioService.Stub { break; case MSG_MEDIA_SERVER_DIED: - Log.e(TAG, "Media server died."); // Force creation of new IAudioflinger interface - mMediaServerOk = false; - AudioSystem.getMode(); + if (!mMediaServerOk) { + Log.e(TAG, "Media server died."); + AudioSystem.isMusicActive(); + sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SHARED_MSG, SENDMSG_NOOP, 0, 0, + null, 500); + } break; case MSG_MEDIA_SERVER_STARTED: Log.e(TAG, "Media server started."); - // Restore audio routing and stream volumes - applyAudioSettings(); + // Restore device connection states + Set set = mConnectedDevices.entrySet(); + Iterator i = set.iterator(); + while(i.hasNext()){ + Map.Entry device = (Map.Entry)i.next(); + AudioSystem.setDeviceConnectionState(((Integer)device.getKey()).intValue(), + AudioSystem.DEVICE_STATE_AVAILABLE, + (String)device.getValue()); + } + + // Restore call state + AudioSystem.setPhoneState(mMode); + + // Restore forced usage for communcations and record + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, mForcedUseForComm); + AudioSystem.setForceUse(AudioSystem.FOR_RECORD, mForcedUseForComm); + + // Restore stream volumes int numStreamTypes = AudioSystem.getNumStreamTypes(); for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) { - int volume; + int index; VolumeStreamState streamState = mStreamStates[streamType]; + AudioSystem.initStreamVolume(streamType, 0, (streamState.mIndexMax + 5) / 10); if (streamState.muteCount() == 0) { - volume = streamState.mVolumes[streamState.mIndex]; + index = streamState.mIndex; } else { - volume = streamState.mVolumes[0]; + index = 0; } - AudioSystem.setVolume(streamType, volume); + setStreamVolumeIndex(streamType, index); } - setRingerMode(mRingerMode); - mMediaServerOk = true; + + // Restore ringer mode + setRingerModeInt(getRingerMode(), false); break; case MSG_PLAY_SOUND_EFFECT: @@ -1451,28 +1342,191 @@ public class AudioService extends IAudioService.Stub { } private class SettingsObserver extends ContentObserver { - + SettingsObserver() { super(new Handler()); mContentResolver.registerContentObserver(Settings.System.getUriFor( Settings.System.MODE_RINGER_STREAMS_AFFECTED), false, this); + mContentResolver.registerContentObserver(Settings.System.getUriFor( + Settings.System.NOTIFICATIONS_USE_RING_VOLUME), false, this); } @Override public void 
onChange(boolean selfChange) { super.onChange(selfChange); - - mRingerModeAffectedStreams = Settings.System.getInt(mContentResolver, - Settings.System.MODE_RINGER_STREAMS_AFFECTED, - 0); + synchronized (mSettingsLock) { + int ringerModeAffectedStreams = Settings.System.getInt(mContentResolver, + Settings.System.MODE_RINGER_STREAMS_AFFECTED, + 0); + if (ringerModeAffectedStreams != mRingerModeAffectedStreams) { + /* + * Ensure all stream types that should be affected by ringer mode + * are in the proper state. + */ + mRingerModeAffectedStreams = ringerModeAffectedStreams; + setRingerModeInt(getRingerMode(), false); + } - /* - * Ensure all stream types that should be affected by ringer mode - * are in the proper state. - */ - setRingerModeInt(getRingerMode(), false); + int notificationsUseRingVolume = Settings.System.getInt(mContentResolver, + Settings.System.NOTIFICATIONS_USE_RING_VOLUME, + 1); + if (notificationsUseRingVolume != mNotificationsUseRingVolume) { + mNotificationsUseRingVolume = notificationsUseRingVolume; + if (mNotificationsUseRingVolume == 1) { + STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING; + mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName( + System.VOLUME_SETTINGS[AudioSystem.STREAM_RING]); + } else { + STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_NOTIFICATION; + mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName( + System.VOLUME_SETTINGS[AudioSystem.STREAM_NOTIFICATION]); + // Persist notification volume volume as it was not persisted while aliased to ring volume + // and persist with no delay as there might be registered observers of the persisted + // notification volume. + sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, AudioSystem.STREAM_NOTIFICATION, + SENDMSG_REPLACE, 0, 0, mStreamStates[AudioSystem.STREAM_NOTIFICATION], 0); + } + } + } + } + } + + /** + * Receiver for misc intent broadcasts the Phone app cares about. 
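Returning to the sound-effect path a little further up: when the caller passes no volume (volume < 0), the handler derives the gain from the current STREAM_MUSIC index with the same linear-to-log mapping as the native AudioSystem::linearToLog(), then knocks off a further 3 dB. A worked sketch of that formula; the 15-step music volume is an assumption about MAX_STREAM_VOLUME, which is defined outside this hunk:

// Worked example of the linear-to-log gain used for UI sound effects when the
// caller supplies no volume. Assumes a 15-step STREAM_MUSIC volume.
public final class SoundEffectVolumeExample {
    static float effectVolume(int musicIndex, int musicMaxIndex) {
        float dBPerStep = (float) ((0.5 * 100) / musicMaxIndex);     // 50 dB range over the index span
        float musicVoldB = dBPerStep * (musicIndex - musicMaxIndex); // 0 dB at full volume
        return (float) Math.pow(10, (musicVoldB - 3) / 20);          // extra 3 dB attenuation
    }

    public static void main(String[] args) {
        // At full music volume the effect plays at -3 dB, about 0.708 of full scale.
        System.out.println(effectVolume(15, 15));
        // At index 7 it is roughly -29.7 dB, about 0.033 of full scale.
        System.out.println(effectVolume(7, 15));
    }
}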
+ */ + private class AudioServiceBroadcastReceiver extends BroadcastReceiver { + @Override + public void onReceive(Context context, Intent intent) { + String action = intent.getAction(); + + if (action.equals(BluetoothA2dp.ACTION_SINK_STATE_CHANGED)) { + int state = intent.getIntExtra(BluetoothA2dp.EXTRA_SINK_STATE, + BluetoothA2dp.STATE_DISCONNECTED); + BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); + String address = btDevice.getAddress(); + boolean isConnected = (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) && + ((String)mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)).equals(address)); + + if (isConnected && + state != BluetoothA2dp.STATE_CONNECTED && state != BluetoothA2dp.STATE_PLAYING) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + address); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP); + } else if (!isConnected && + (state == BluetoothA2dp.STATE_CONNECTED || + state == BluetoothA2dp.STATE_PLAYING)) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, + AudioSystem.DEVICE_STATE_AVAILABLE, + address); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP), + address); + } + } else if (action.equals(BluetoothHeadset.ACTION_STATE_CHANGED)) { + int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, + BluetoothHeadset.STATE_ERROR); + int device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO; + BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); + String address = null; + if (btDevice != null) { + address = btDevice.getAddress(); + BluetoothClass btClass = btDevice.getBluetoothClass(); + if (btClass != null) { + switch (btClass.getDeviceClass()) { + case BluetoothClass.Device.AUDIO_VIDEO_WEARABLE_HEADSET: + case BluetoothClass.Device.AUDIO_VIDEO_HANDSFREE: + device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_HEADSET; + break; + case BluetoothClass.Device.AUDIO_VIDEO_CAR_AUDIO: + device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_CARKIT; + break; + } + } + } + + boolean isConnected = (mConnectedDevices.containsKey(device) && + ((String)mConnectedDevices.get(device)).equals(address)); + + if (isConnected && state != BluetoothHeadset.STATE_CONNECTED) { + AudioSystem.setDeviceConnectionState(device, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + address); + mConnectedDevices.remove(device); + } else if (!isConnected && state == BluetoothHeadset.STATE_CONNECTED) { + AudioSystem.setDeviceConnectionState(device, + AudioSystem.DEVICE_STATE_AVAILABLE, + address); + mConnectedDevices.put(new Integer(device), address); + } + } else if (action.equals(Intent.ACTION_HEADSET_PLUG)) { + int state = intent.getIntExtra("state", 0); + if ((state & BIT_HEADSET) == 0 && + (mHeadsetState & BIT_HEADSET) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_WIRED_HEADSET); + } else if ((state & BIT_HEADSET) != 0 && + (mHeadsetState & BIT_HEADSET) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_WIRED_HEADSET), ""); + } + if ((state & BIT_HEADSET_NO_MIC) == 0 && + (mHeadsetState & BIT_HEADSET_NO_MIC) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE, + 
AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE); + } else if ((state & BIT_HEADSET_NO_MIC) != 0 && + (mHeadsetState & BIT_HEADSET_NO_MIC) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE), ""); + } + if ((state & BIT_TTY) == 0 && + (mHeadsetState & BIT_TTY) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_TTY, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_TTY); + } else if ((state & BIT_TTY) != 0 && + (mHeadsetState & BIT_TTY) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_TTY, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_TTY), ""); + } + if ((state & BIT_FM_HEADSET) == 0 && + (mHeadsetState & BIT_FM_HEADSET) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_HEADPHONE, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_FM_HEADPHONE); + } else if ((state & BIT_FM_HEADSET) != 0 && + (mHeadsetState & BIT_FM_HEADSET) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_HEADPHONE, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_FM_HEADPHONE), ""); + } + if ((state & BIT_FM_SPEAKER) == 0 && + (mHeadsetState & BIT_FM_SPEAKER) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_SPEAKER, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_FM_SPEAKER); + } else if ((state & BIT_FM_SPEAKER) != 0 && + (mHeadsetState & BIT_FM_SPEAKER) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_SPEAKER, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_FM_SPEAKER), ""); + } + mHeadsetState = state; + } } - } - } diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java index 5917ab9..dbf6d9d 100644 --- a/media/java/android/media/AudioSystem.java +++ b/media/java/android/media/AudioSystem.java @@ -45,38 +45,21 @@ public class AudioSystem public static final int STREAM_NOTIFICATION = 5; /* @hide The audio stream for phone calls when connected on bluetooth */ public static final int STREAM_BLUETOOTH_SCO = 6; + /* @hide The audio stream for enforced system sounds in certain countries (e.g camera in Japan) */ + public static final int STREAM_SYSTEM_ENFORCED = 7; + /* @hide The audio stream for DTMF tones */ + public static final int STREAM_DTMF = 8; + /* @hide The audio stream for text to speech (TTS) */ + public static final int STREAM_TTS = 9; /** * @deprecated Use {@link #numStreamTypes() instead} */ public static final int NUM_STREAMS = 5; // Expose only the getter method publicly so we can change it in the future - private static final int NUM_STREAM_TYPES = 7; + private static final int NUM_STREAM_TYPES = 10; public static final int getNumStreamTypes() { return NUM_STREAM_TYPES; } - /* max and min volume levels */ - /* Maximum volume setting, for use with setVolume(int,int) */ - public static final int MAX_VOLUME = 100; - /* Minimum volume setting, for use with setVolume(int,int) */ - public static final int MIN_VOLUME = 0; - - /* - * Sets the volume of a specified audio stream. 
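The ACTION_HEADSET_PLUG branch above repeats one pattern per BIT_* flag: compare the reported state against mHeadsetState and, for every bit that changed, mark the matching output device available or unavailable via AudioSystem.setDeviceConnectionState(). A condensed sketch of that pattern; the BIT_* values below are placeholders (AudioService defines the real ones outside this hunk), while the device IDs match the constants this patch adds to AudioSystem:

// Condensed form of the headset-plug handling above: walk a bit -> device
// table and report only the devices whose bit actually changed.
import java.util.LinkedHashMap;
import java.util.Map;

public final class HeadsetStateExample {
    // Placeholder bit values; AudioService defines its own BIT_* constants.
    static final int BIT_HEADSET        = 1 << 0;
    static final int BIT_HEADSET_NO_MIC = 1 << 1;

    // Values taken from the AudioSystem constants added in this patch.
    static final int DEVICE_OUT_WIRED_HEADSET   = 0x4;
    static final int DEVICE_OUT_WIRED_HEADPHONE = 0x8;

    static void applyHeadsetState(int oldState, int newState,
                                  Map<Integer, Integer> bitToDevice) {
        int changed = oldState ^ newState;
        for (Map.Entry<Integer, Integer> e : bitToDevice.entrySet()) {
            int bit = e.getKey();
            if ((changed & bit) == 0) continue;
            boolean available = (newState & bit) != 0;
            // Real code: AudioSystem.setDeviceConnectionState(device, state, "")
            System.out.println("device 0x" + Integer.toHexString(e.getValue())
                    + (available ? " -> available" : " -> unavailable"));
        }
    }

    public static void main(String[] args) {
        Map<Integer, Integer> table = new LinkedHashMap<Integer, Integer>();
        table.put(BIT_HEADSET, DEVICE_OUT_WIRED_HEADSET);
        table.put(BIT_HEADSET_NO_MIC, DEVICE_OUT_WIRED_HEADPHONE);
        // Swap a headset (with mic) for headphones (no mic).
        applyHeadsetState(BIT_HEADSET, BIT_HEADSET_NO_MIC, table);
    }
}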
- * - * param type the stream type to set the volume of (e.g. STREAM_MUSIC) - * param volume the volume level to set (0-100) - * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR - */ - public static native int setVolume(int type, int volume); - - /* - * Returns the volume of a specified audio stream. - * - * param type the stream type to get the volume of (e.g. STREAM_MUSIC) - * return the current volume (0-100) - */ - public static native int getVolume(int type); - /* * Sets the microphone mute on or off. * @@ -101,17 +84,23 @@ public class AudioSystem * it can route the audio appropriately. * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR */ - public static native int setMode(int mode); - + /** @deprecated use {@link #setPhoneState(int)} */ + public static int setMode(int mode) { + return AUDIO_STATUS_ERROR; + } /* * Returns the current audio mode. * * return the current audio mode (NORMAL, RINGTONE, or IN_CALL). * Returns the current current audio state from the HAL. + * */ - public static native int getMode(); + /** @deprecated Do not use. */ + public static int getMode() { + return MODE_INVALID; + } - /* modes for setMode/getMode/setRoute/getRoute */ + /* modes for setPhoneState */ public static final int MODE_INVALID = -2; public static final int MODE_CURRENT = -1; public static final int MODE_NORMAL = 0; @@ -121,15 +110,20 @@ public class AudioSystem /* Routing bits for setRouting/getRouting API */ - public static final int ROUTE_EARPIECE = (1 << 0); - public static final int ROUTE_SPEAKER = (1 << 1); - + /** @deprecated */ + @Deprecated public static final int ROUTE_EARPIECE = (1 << 0); + /** @deprecated */ + @Deprecated public static final int ROUTE_SPEAKER = (1 << 1); /** @deprecated use {@link #ROUTE_BLUETOOTH_SCO} */ @Deprecated public static final int ROUTE_BLUETOOTH = (1 << 2); - public static final int ROUTE_BLUETOOTH_SCO = (1 << 2); - public static final int ROUTE_HEADSET = (1 << 3); - public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4); - public static final int ROUTE_ALL = 0xFFFFFFFF; + /** @deprecated */ + @Deprecated public static final int ROUTE_BLUETOOTH_SCO = (1 << 2); + /** @deprecated */ + @Deprecated public static final int ROUTE_HEADSET = (1 << 3); + /** @deprecated */ + @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4); + /** @deprecated */ + @Deprecated public static final int ROUTE_ALL = 0xFFFFFFFF; /* * Sets the audio routing for a specified mode @@ -141,7 +135,10 @@ public class AudioSystem * ROUTE_xxx types. Unset bits indicate the route should be left unchanged * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR */ - public static native int setRouting(int mode, int routes, int mask); + /** @deprecated use {@link #setDeviceConnectionState(int,int,String)} */ + public static int setRouting(int mode, int routes, int mask) { + return AUDIO_STATUS_ERROR; + } /* * Returns the current audio routing bit vector for a specified mode. @@ -150,7 +147,10 @@ public class AudioSystem * return an audio route bit vector that can be compared with ROUTE_xxx * bits */ - public static native int getRouting(int mode); + /** @deprecated use {@link #getDeviceConnectionState(int,String)} */ + public static int getRouting(int mode) { + return 0; + } /* * Checks whether any music is active. @@ -160,17 +160,23 @@ public class AudioSystem public static native boolean isMusicActive(); /* - * Sets a generic audio configuration parameter. 
The use of these parameters + * Sets a group generic audio configuration parameters. The use of these parameters * are platform dependant, see libaudio * - * ** Temporary interface - DO NOT USE - * - * TODO: Replace with a more generic key:value get/set mechanism + * param keyValuePairs list of parameters key value pairs in the form: + * key1=value1;key2=value2;... + */ + public static native int setParameters(String keyValuePairs); + + /* + * Gets a group generic audio configuration parameters. The use of these parameters + * are platform dependant, see libaudio * - * param key name of parameter to set. Must not be null. - * param value value of parameter. Must not be null. + * param keys list of parameters + * return value: list of parameters key value pairs in the form: + * key1=value1;key2=value2;... */ - public static native void setParameter(String key, String value); + public static native String getParameters(String keys); /* private final static String TAG = "audio"; @@ -220,4 +226,68 @@ public class AudioSystem mErrorCallback.onError(error); } } + + /* + * AudioPolicyService methods + */ + + // output devices + public static final int DEVICE_OUT_EARPIECE = 0x1; + public static final int DEVICE_OUT_SPEAKER = 0x2; + public static final int DEVICE_OUT_WIRED_HEADSET = 0x4; + public static final int DEVICE_OUT_WIRED_HEADPHONE = 0x8; + public static final int DEVICE_OUT_BLUETOOTH_SCO = 0x10; + public static final int DEVICE_OUT_BLUETOOTH_SCO_HEADSET = 0x20; + public static final int DEVICE_OUT_BLUETOOTH_SCO_CARKIT = 0x40; + public static final int DEVICE_OUT_BLUETOOTH_A2DP = 0x80; + public static final int DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100; + public static final int DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200; + public static final int DEVICE_OUT_AUX_DIGITAL = 0x400; + public static final int DEVICE_OUT_FM_HEADPHONE = 0x800; + public static final int DEVICE_OUT_FM_SPEAKER = 0x1000; + public static final int DEVICE_OUT_TTY = 0x2000; + public static final int DEVICE_OUT_DEFAULT = 0x8000; + // input devices + public static final int DEVICE_IN_COMMUNICATION = 0x10000; + public static final int DEVICE_IN_AMBIENT = 0x20000; + public static final int DEVICE_IN_BUILTIN_MIC1 = 0x40000; + public static final int DEVICE_IN_BUILTIN_MIC2 = 0x80000; + public static final int DEVICE_IN_MIC_ARRAY = 0x100000; + public static final int DEVICE_IN_BLUETOOTH_SCO_HEADSET = 0x200000; + public static final int DEVICE_IN_WIRED_HEADSET = 0x400000; + public static final int DEVICE_IN_AUX_DIGITAL = 0x800000; + public static final int DEVICE_IN_DEFAULT = 0x80000000; + + // device states + public static final int DEVICE_STATE_UNAVAILABLE = 0; + public static final int DEVICE_STATE_AVAILABLE = 1; + + // phone state + public static final int PHONE_STATE_OFFCALL = 0; + public static final int PHONE_STATE_RINGING = 1; + public static final int PHONE_STATE_INCALL = 2; + + // config for setForceUse + public static final int FORCE_NONE = 0; + public static final int FORCE_SPEAKER = 1; + public static final int FORCE_HEADPHONES = 2; + public static final int FORCE_BT_SCO = 3; + public static final int FORCE_BT_A2DP = 4; + public static final int FORCE_WIRED_ACCESSORY = 5; + public static final int FORCE_DEFAULT = FORCE_NONE; + + // usage for serForceUse + public static final int FOR_COMMUNICATION = 0; + public static final int FOR_MEDIA = 1; + public static final int FOR_RECORD = 2; + + public static native int setDeviceConnectionState(int device, int state, String device_address); + public static native int 
getDeviceConnectionState(int device, String device_address); + public static native int setPhoneState(int state); + public static native int setRingerMode(int mode, int mask); + public static native int setForceUse(int usage, int config); + public static native int getForceUse(int usage); + public static native int initStreamVolume(int stream, int indexMin, int indexMax); + public static native int setStreamVolumeIndex(int stream, int index); + public static native int getStreamVolumeIndex(int stream); } diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java index 5f1be9d..1e8d72f 100644 --- a/media/java/android/media/AudioTrack.java +++ b/media/java/android/media/AudioTrack.java @@ -120,7 +120,7 @@ public class AudioTrack public static final int ERROR_INVALID_OPERATION = -3; private static final int ERROR_NATIVESETUP_AUDIOSYSTEM = -16; - private static final int ERROR_NATIVESETUP_INVALIDCHANNELCOUNT = -17; + private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK = -17; private static final int ERROR_NATIVESETUP_INVALIDFORMAT = -18; private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE = -19; private static final int ERROR_NATIVESETUP_NATIVEINITFAILED = -20; @@ -181,10 +181,15 @@ public class AudioTrack */ private int mSampleRate = 22050; /** - * The number of input audio channels (1 is mono, 2 is stereo). + * The number of audio output channels (1 is mono, 2 is stereo). */ private int mChannelCount = 1; /** + * The audio channel mask. + */ + private int mChannels = AudioFormat.CHANNEL_OUT_MONO; + + /** * The type of the audio stream to play. See * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM}, * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC} and @@ -198,7 +203,7 @@ public class AudioTrack /** * The current audio channel configuration. */ - private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO; /** * The encoding of the audio samples. * @see AudioFormat#ENCODING_PCM_8BIT @@ -235,8 +240,8 @@ public class AudioTrack * @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but * not limited to) 44100, 22050 and 11025. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_OUT_MONO} and + * {@link AudioFormat#CHANNEL_OUT_STEREO} * @param audioFormat the format in which the audio data is represented. 
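For the AudioTrack changes that start here, a short usage sketch of the renamed channel constants may help; it uses the public streaming constructor and getMinBufferSize(), with CHANNEL_OUT_STEREO being one of the names this patch switches to:

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public final class ChannelMaskExample {
    // Streaming track set up with the CHANNEL_OUT_* names this patch moves to;
    // the constructor switch above still accepts the old CHANNEL_CONFIGURATION_* values.
    static AudioTrack createStereoTrack(int sampleRateInHz) {
        int channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        int encoding = AudioFormat.ENCODING_PCM_16BIT;
        int minBuf = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, encoding);
        // A negative value from getMinBufferSize() signals an unsupported combination.
        return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
                channelConfig, encoding, minBuf, AudioTrack.MODE_STREAM);
    }
}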
* See {@link AudioFormat#ENCODING_PCM_16BIT} and * {@link AudioFormat#ENCODING_PCM_8BIT} @@ -266,7 +271,7 @@ public class AudioTrack // native initialization int initResult = native_setup(new WeakReference<AudioTrack>(this), - mStreamType, mSampleRate, mChannelCount, mAudioFormat, + mStreamType, mSampleRate, mChannels, mAudioFormat, mNativeBufferSizeInBytes, mDataLoadMode); if (initResult != SUCCESS) { loge("Error code "+initResult+" when initializing AudioTrack."); @@ -286,6 +291,7 @@ public class AudioTrack // postconditions: // mStreamType is valid // mChannelCount is valid + // mChannels is valid // mAudioFormat is valid // mSampleRate is valid // mDataLoadMode is valid @@ -298,7 +304,8 @@ public class AudioTrack && (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM) && (streamType != AudioManager.STREAM_VOICE_CALL) && (streamType != AudioManager.STREAM_NOTIFICATION) - && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)) { + && (streamType != AudioManager.STREAM_BLUETOOTH_SCO) + && (streamType != AudioManager.STREAM_DTMF)) { throw (new IllegalArgumentException("Invalid stream type.")); } else { mStreamType = streamType; @@ -315,18 +322,23 @@ public class AudioTrack //-------------- // channel config + mChannelConfiguration = channelConfig; + switch (channelConfig) { - case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT: + case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT + case AudioFormat.CHANNEL_OUT_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: mChannelCount = 1; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + mChannels = AudioFormat.CHANNEL_OUT_MONO; break; + case AudioFormat.CHANNEL_OUT_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: mChannelCount = 2; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + mChannels = AudioFormat.CHANNEL_OUT_STEREO; break; default: mChannelCount = 0; + mChannels = AudioFormat.CHANNEL_INVALID; mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_INVALID; throw(new IllegalArgumentException("Unsupported channel configuration.")); } @@ -452,8 +464,8 @@ public class AudioTrack /** * Returns the configured channel configuration. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} - * and {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}. + * See {@link AudioFormat#CHANNEL_OUT_MONO} + * and {@link AudioFormat#CHANNEL_OUT_STEREO}. */ public int getChannelConfiguration() { return mChannelConfiguration; @@ -531,8 +543,8 @@ public class AudioTrack * the expected frequency at which the buffer will be refilled with additional data to play. * @param sampleRateInHz the sample rate expressed in Hertz. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_OUT_MONO} and + * {@link AudioFormat#CHANNEL_OUT_STEREO} * @param audioFormat the format in which the audio data is represented. 
* See {@link AudioFormat#ENCODING_PCM_16BIT} and * {@link AudioFormat#ENCODING_PCM_8BIT} @@ -544,9 +556,11 @@ public class AudioTrack static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { int channelCount = 0; switch(channelConfig) { + case AudioFormat.CHANNEL_OUT_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: channelCount = 1; break; + case AudioFormat.CHANNEL_OUT_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: channelCount = 2; break; diff --git a/media/java/android/media/ExifInterface.java b/media/java/android/media/ExifInterface.java index 6d7c0ae..6cd4b92 100644 --- a/media/java/android/media/ExifInterface.java +++ b/media/java/android/media/ExifInterface.java @@ -16,177 +16,128 @@ package android.media; -import android.util.Log; - +import java.io.IOException; +import java.text.ParsePosition; +import java.text.SimpleDateFormat; +import java.util.Date; import java.util.HashMap; import java.util.Map; /** - * Wrapper for native Exif library - * {@hide} + * This is a class for reading and writing Exif tags in a JPEG file. */ public class ExifInterface { - private static final String TAG = "ExifInterface"; - private String mFilename; - - // Constants used for the Orientation Exif tag. - public static final int ORIENTATION_UNDEFINED = 0; - public static final int ORIENTATION_NORMAL = 1; - - // Constants used for white balance - public static final int WHITEBALANCE_AUTO = 0; - public static final int WHITEBALANCE_MANUAL = 1; - - // left right reversed mirror - public static final int ORIENTATION_FLIP_HORIZONTAL = 2; - public static final int ORIENTATION_ROTATE_180 = 3; - - // upside down mirror - public static final int ORIENTATION_FLIP_VERTICAL = 4; - - // flipped about top-left <--> bottom-right axis - public static final int ORIENTATION_TRANSPOSE = 5; - - // rotate 90 cw to right it - public static final int ORIENTATION_ROTATE_90 = 6; - - // flipped about top-right <--> bottom-left axis - public static final int ORIENTATION_TRANSVERSE = 7; - - // rotate 270 to right it - public static final int ORIENTATION_ROTATE_270 = 8; // The Exif tag names public static final String TAG_ORIENTATION = "Orientation"; - - public static final String TAG_DATE_TIME_ORIGINAL = "DateTimeOriginal"; + public static final String TAG_DATETIME = "DateTime"; public static final String TAG_MAKE = "Make"; public static final String TAG_MODEL = "Model"; public static final String TAG_FLASH = "Flash"; public static final String TAG_IMAGE_WIDTH = "ImageWidth"; public static final String TAG_IMAGE_LENGTH = "ImageLength"; - public static final String TAG_GPS_LATITUDE = "GPSLatitude"; public static final String TAG_GPS_LONGITUDE = "GPSLongitude"; - public static final String TAG_GPS_LATITUDE_REF = "GPSLatitudeRef"; public static final String TAG_GPS_LONGITUDE_REF = "GPSLongitudeRef"; public static final String TAG_WHITE_BALANCE = "WhiteBalance"; - private boolean mSavedAttributes = false; - private boolean mHasThumbnail = false; - private HashMap<String, String> mCachedAttributes = null; + // Constants used for the Orientation Exif tag. 
+ public static final int ORIENTATION_UNDEFINED = 0; + public static final int ORIENTATION_NORMAL = 1; + public static final int ORIENTATION_FLIP_HORIZONTAL = 2; // left right reversed mirror + public static final int ORIENTATION_ROTATE_180 = 3; + public static final int ORIENTATION_FLIP_VERTICAL = 4; // upside down mirror + public static final int ORIENTATION_TRANSPOSE = 5; // flipped about top-left <--> bottom-right axis + public static final int ORIENTATION_ROTATE_90 = 6; // rotate 90 cw to right it + public static final int ORIENTATION_TRANSVERSE = 7; // flipped about top-right <--> bottom-left axis + public static final int ORIENTATION_ROTATE_270 = 8; // rotate 270 to right it + + // Constants used for white balance + public static final int WHITEBALANCE_AUTO = 0; + public static final int WHITEBALANCE_MANUAL = 1; static { System.loadLibrary("exif"); } - private static ExifInterface sExifObj = null; - /** - * Since the underlying jhead native code is not thread-safe, - * ExifInterface should use singleton interface instead of public - * constructor. - */ - private static synchronized ExifInterface instance() { - if (sExifObj == null) { - sExifObj = new ExifInterface(); - } + private String mFilename; + private HashMap<String, String> mAttributes; + private boolean mHasThumbnail; - return sExifObj; - } + // Because the underlying implementation (jhead) uses static variables, + // there can only be one user at a time for the native functions (and + // they cannot keep state in the native code across function calls). We + // use sLock to serialize the accesses. + private static Object sLock = new Object(); /** - * The following 3 static methods are handy routines for atomic operation - * of underlying jhead library. It retrieves EXIF data and then release - * ExifInterface immediately. + * Reads Exif tags from the specified JPEG file. */ - public static synchronized HashMap<String, String> loadExifData(String filename) { - ExifInterface exif = instance(); - HashMap<String, String> exifData = null; - if (exif != null) { - exif.setFilename(filename); - exifData = exif.getAttributes(); - } - return exifData; - } - - public static synchronized void saveExifData(String filename, HashMap<String, String> exifData) { - ExifInterface exif = instance(); - if (exif != null) { - exif.setFilename(filename); - exif.saveAttributes(exifData); - } + public ExifInterface(String filename) throws IOException { + mFilename = filename; + loadAttributes(); } - public static synchronized byte[] getExifThumbnail(String filename) { - ExifInterface exif = instance(); - if (exif != null) { - exif.setFilename(filename); - return exif.getThumbnail(); - } - return null; + /** + * Returns the value of the specified tag or {@code null} if there + * is no such tag in the JPEG file. + * + * @param tag the name of the tag. + */ + public String getAttribute(String tag) { + return mAttributes.get(tag); } - public void setFilename(String filename) { - if (mFilename == null || !mFilename.equals(filename)) { - mFilename = filename; - mCachedAttributes = null; + /** + * Returns the integer value of the specified tag. If there is no such tag + * in the JPEG file or the value cannot be parsed as integer, return + * <var>defaultValue</var>. + * + * @param tag the name of the tag. + * @param defaultValue the value to return if the tag is not available. 
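Taken together, the reworked ExifInterface in this hunk is a per-file, instance-based API: construct it with a path, read tags with getAttribute()/getAttributeInt(), batch writes with setAttribute() plus a single saveAttributes(), and pull GPS data with getLatLong(), all of which appear in this hunk. A usage sketch; the file path is a placeholder:

import java.io.IOException;
import android.media.ExifInterface;

public class ExifUsageExample {
    // Reads a couple of tags and batches one write; the path is a placeholder.
    static void touchUpExif(String path) throws IOException {
        ExifInterface exif = new ExifInterface(path);   // parses the file up front

        // Typed read with a default value, as getAttributeInt() documents above.
        int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                ExifInterface.ORIENTATION_NORMAL);

        // All four GPS tags must be present for this to return true.
        float[] latLong = new float[2];
        boolean hasLocation = exif.getLatLong(latLong);

        // Writes are batched in memory and flushed with one expensive rewrite.
        exif.setAttribute(ExifInterface.TAG_MAKE, "Example");
        exif.saveAttributes();

        System.out.println("orientation=" + orientation + " gps=" + hasLocation);
    }
}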
+ */ + public int getAttributeInt(String tag, int defaultValue) { + String value = mAttributes.get(tag); + if (value == null) return defaultValue; + try { + return Integer.valueOf(value); + } catch (NumberFormatException ex) { + return defaultValue; } } /** - * Given a HashMap of Exif tags and associated values, an Exif section in - * the JPG file is created and loaded with the tag data. saveAttributes() - * is expensive because it involves copying all the JPG data from one file - * to another and deleting the old file and renaming the other. It's best - * to collect all the attributes to write and make a single call rather - * than multiple calls for each attribute. You must call "commitChanges()" - * at some point to commit the changes. + * Set the value of the specified tag. + * + * @param tag the name of the tag. + * @param value the value of the tag. */ - public void saveAttributes(HashMap<String, String> attributes) { - // format of string passed to native C code: - // "attrCnt attr1=valueLen value1attr2=value2Len value2..." - // example: - // "4 attrPtr ImageLength=4 1024Model=6 FooImageWidth=4 1280Make=3 FOO" - StringBuilder sb = new StringBuilder(); - int size = attributes.size(); - if (attributes.containsKey("hasThumbnail")) { - --size; - } - sb.append(size + " "); - for (Map.Entry<String, String> iter : attributes.entrySet()) { - String key = iter.getKey(); - if (key.equals("hasThumbnail")) { - // this is a fake attribute not saved as an exif tag - continue; - } - String val = iter.getValue(); - sb.append(key + "="); - sb.append(val.length() + " "); - sb.append(val); - } - String s = sb.toString(); - saveAttributesNative(mFilename, s); - commitChangesNative(mFilename); - mSavedAttributes = true; + public void setAttribute(String tag, String value) { + mAttributes.put(tag, value); } /** - * Returns a HashMap loaded with the Exif attributes of the file. The key - * is the standard tag name and the value is the tag's value: e.g. - * Model -> Nikon. Numeric values are returned as strings. + * Initialize mAttributes with the attributes from the file mFilename. + * + * mAttributes is a HashMap which stores the Exif attributes of the file. + * The key is the standard tag name and the value is the tag's value: e.g. + * Model -> Nikon. Numeric values are stored as strings. + * + * This function also initialize mHasThumbnail to indicate whether the + * file has a thumbnail inside. */ - public HashMap<String, String> getAttributes() { - if (mCachedAttributes != null) { - return mCachedAttributes; - } + private void loadAttributes() throws IOException { // format of string passed from native C code: // "attrCnt attr1=valueLen value1attr2=value2Len value2..." // example: // "4 attrPtr ImageLength=4 1024Model=6 FooImageWidth=4 1280Make=3 FOO" - mCachedAttributes = new HashMap<String, String>(); + mAttributes = new HashMap<String, String>(); - String attrStr = getAttributesNative(mFilename); + String attrStr; + synchronized (sLock) { + attrStr = getAttributesNative(mFilename); + } // get count int ptr = attrStr.indexOf(' '); @@ -212,116 +163,108 @@ public class ExifInterface { if (attrName.equals("hasThumbnail")) { mHasThumbnail = attrValue.equalsIgnoreCase("true"); } else { - mCachedAttributes.put(attrName, attrValue); + mAttributes.put(attrName, attrValue); } } - return mCachedAttributes; } /** - * Given a numerical white balance value, return a - * human-readable string describing it. + * Save the tag data into the JPEG file. 
This is expensive because it involves + * copying all the JPG data from one file to another and deleting the old file + * and renaming the other. It's best to use {@link #setAttribute(String,String)} + * to set all attributes to write and make a single call rather than multiple + * calls for each attribute. */ - public static String whiteBalanceToString(int whitebalance) { - switch (whitebalance) { - case WHITEBALANCE_AUTO: - return "Auto"; - case WHITEBALANCE_MANUAL: - return "Manual"; - default: - return ""; + public void saveAttributes() throws IOException { + // format of string passed to native C code: + // "attrCnt attr1=valueLen value1attr2=value2Len value2..." + // example: + // "4 attrPtr ImageLength=4 1024Model=6 FooImageWidth=4 1280Make=3 FOO" + StringBuilder sb = new StringBuilder(); + int size = mAttributes.size(); + if (mAttributes.containsKey("hasThumbnail")) { + --size; + } + sb.append(size + " "); + for (Map.Entry<String, String> iter : mAttributes.entrySet()) { + String key = iter.getKey(); + if (key.equals("hasThumbnail")) { + // this is a fake attribute not saved as an exif tag + continue; + } + String val = iter.getValue(); + sb.append(key + "="); + sb.append(val.length() + " "); + sb.append(val); + } + String s = sb.toString(); + synchronized (sLock) { + saveAttributesNative(mFilename, s); + commitChangesNative(mFilename); } } /** - * Given a numerical orientation, return a human-readable string describing - * the orientation. + * Returns true if the JPEG file has a thumbnail. */ - public static String orientationToString(int orientation) { - // TODO: this function needs to be localized and use string resource ids - // rather than strings - String orientationString; - switch (orientation) { - case ORIENTATION_NORMAL: - orientationString = "Normal"; - break; - case ORIENTATION_FLIP_HORIZONTAL: - orientationString = "Flipped horizontal"; - break; - case ORIENTATION_ROTATE_180: - orientationString = "Rotated 180 degrees"; - break; - case ORIENTATION_FLIP_VERTICAL: - orientationString = "Upside down mirror"; - break; - case ORIENTATION_TRANSPOSE: - orientationString = "Transposed"; - break; - case ORIENTATION_ROTATE_90: - orientationString = "Rotated 90 degrees"; - break; - case ORIENTATION_TRANSVERSE: - orientationString = "Transversed"; - break; - case ORIENTATION_ROTATE_270: - orientationString = "Rotated 270 degrees"; - break; - default: - orientationString = "Undefined"; - break; - } - return orientationString; + public boolean hasThumbnail() { + return mHasThumbnail; } /** - * Copies the thumbnail data out of the filename and puts it in the Exif - * data associated with the file used to create this object. You must call - * "commitChanges()" at some point to commit the changes. + * Returns the thumbnail inside the JPEG file, or {@code null} if there is no thumbnail. + * The returned data is in JPEG format and can be decoded using + * {@link android.graphics.BitmapFactory#decodeByteArray(byte[],int,int)} */ - public boolean appendThumbnail(String thumbnailFileName) { - if (!mSavedAttributes) { - throw new RuntimeException("Must call saveAttributes " - + "before calling appendThumbnail"); + public byte[] getThumbnail() { + synchronized (sLock) { + return getThumbnailNative(mFilename); } - mHasThumbnail = appendThumbnailNative(mFilename, thumbnailFileName); - return mHasThumbnail; } - public boolean hasThumbnail() { - if (!mSavedAttributes) { - getAttributes(); + /** + * Stores the latitude and longitude value in a float array. 
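The GPS tags handled here are stored as comma-separated rationals, degrees/minutes/seconds, as the removed makeLatLongString() and the surviving convertRationalLatLonToFloat() further down show. A worked example of that decoding; the sample string is invented, and the sign convention (south and west negative) is the usual one rather than something this hunk spells out:

// Worked example of the rational degrees/minutes/seconds encoding used by the
// GPS tags, mirroring convertRationalLatLonToFloat() later in this hunk.
public final class GpsRationalExample {
    static double parseRational(String r) {
        String[] p = r.split("/");
        return Double.parseDouble(p[0]) / Double.parseDouble(p[1]);
    }

    // "37/1,25/1,19564/1000" with ref "N" -> +37 deg 25 min 19.564 sec
    static float toDegrees(String rational, String ref) {
        String[] parts = rational.split(",");
        double degrees = parseRational(parts[0]);
        double minutes = parseRational(parts[1]);
        double seconds = parseRational(parts[2]);
        double result = degrees + minutes / 60.0 + seconds / 3600.0;
        return (ref.equals("S") || ref.equals("W")) ? (float) -result : (float) result;
    }

    public static void main(String[] args) {
        System.out.println(toDegrees("37/1,25/1,19564/1000", "N")); // ~37.422
    }
}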
The first element is + * the latitude, and the second element is the longitude. Returns false if the + * Exif tags are not available. + */ + public boolean getLatLong(float output[]) { + String latValue = mAttributes.get(ExifInterface.TAG_GPS_LATITUDE); + String latRef = mAttributes.get(ExifInterface.TAG_GPS_LATITUDE_REF); + String lngValue = mAttributes.get(ExifInterface.TAG_GPS_LONGITUDE); + String lngRef = mAttributes.get(ExifInterface.TAG_GPS_LONGITUDE_REF); + + if (latValue != null && latRef != null && lngValue != null && lngRef != null) { + output[0] = convertRationalLatLonToFloat(latValue, latRef); + output[1] = convertRationalLatLonToFloat(lngValue, lngRef); + return true; + } else { + return false; } - return mHasThumbnail; } - public byte[] getThumbnail() { - return getThumbnailNative(mFilename); - } + private static SimpleDateFormat sFormatter = + new SimpleDateFormat("yyyy:MM:dd HH:mm:ss"); - public static float[] getLatLng(HashMap<String, String> exifData) { - if (exifData == null) { - return null; - } + /** + * Returns number of milliseconds since Jan. 1, 1970, midnight GMT. + * Returns -1 if the date time information if not available. + * @hide + */ + public long getDateTime() { + String dateTimeString = mAttributes.get(TAG_DATETIME); + if (dateTimeString == null) return -1; - String latValue = exifData.get(ExifInterface.TAG_GPS_LATITUDE); - String latRef = exifData.get(ExifInterface.TAG_GPS_LATITUDE_REF); - String lngValue = exifData.get(ExifInterface.TAG_GPS_LONGITUDE); - String lngRef = exifData.get(ExifInterface.TAG_GPS_LONGITUDE_REF); - float[] latlng = null; - - if (latValue != null && latRef != null - && lngValue != null && lngRef != null) { - latlng = new float[2]; - latlng[0] = ExifInterface.convertRationalLatLonToFloat( - latValue, latRef); - latlng[1] = ExifInterface.convertRationalLatLonToFloat( - lngValue, lngRef); + ParsePosition pos = new ParsePosition(0); + try { + Date date = sFormatter.parse(dateTimeString, pos); + if (date == null) return -1; + return date.getTime(); + } catch (IllegalArgumentException ex) { + return -1; } - - return latlng; } - public static float convertRationalLatLonToFloat( + private static float convertRationalLatLonToFloat( String rationalString, String ref) { try { String [] parts = rationalString.split(","); @@ -351,42 +294,6 @@ public class ExifInterface { } } - public static String convertRationalLatLonToDecimalString( - String rationalString, String ref, boolean usePositiveNegative) { - float result = convertRationalLatLonToFloat(rationalString, ref); - - String preliminaryResult = String.valueOf(result); - if (usePositiveNegative) { - String neg = (ref.equals("S") || ref.equals("E")) ? "-" : ""; - return neg + preliminaryResult; - } else { - return preliminaryResult + String.valueOf((char) 186) + " " - + ref; - } - } - - public static String makeLatLongString(double d) { - d = Math.abs(d); - - int degrees = (int) d; - - double remainder = d - degrees; - int minutes = (int) (remainder * 60D); - // really seconds * 1000 - int seconds = (int) (((remainder * 60D) - minutes) * 60D * 1000D); - - String retVal = degrees + "/1," + minutes + "/1," + seconds + "/1000"; - return retVal; - } - - public static String makeLatStringRef(double lat) { - return lat >= 0D ? "N" : "S"; - } - - public static String makeLonStringRef(double lon) { - return lon >= 0D ? 
"W" : "E"; - } - private native boolean appendThumbnailNative(String fileName, String thumbnailFileName); diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl index 9a8264f..d3d2d29 100644 --- a/media/java/android/media/IAudioService.aidl +++ b/media/java/android/media/IAudioService.aidl @@ -29,9 +29,9 @@ interface IAudioService { void setStreamVolume(int streamType, int index, int flags); - void setStreamSolo(int streamType, boolean state, IBinder cb); + void setStreamSolo(int streamType, boolean state, IBinder cb); - void setStreamMute(int streamType, boolean state, IBinder cb); + void setStreamMute(int streamType, boolean state, IBinder cb); int getStreamVolume(int streamType); @@ -46,23 +46,11 @@ interface IAudioService { int getVibrateSetting(int vibrateType); boolean shouldVibrate(int vibrateType); - - void setMicrophoneMute(boolean on); - - boolean isMicrophoneMute(); void setMode(int mode); int getMode(); - void setRouting(int mode, int routes, int mask); - - int getRouting(int mode); - - boolean isMusicActive(); - - void setParameter(String key, String value); - oneway void playSoundEffect(int effectType); oneway void playSoundEffectVolume(int effectType, float volume); @@ -72,4 +60,12 @@ interface IAudioService { oneway void unloadSoundEffects(); oneway void reloadAudioSettings(); + + void setSpeakerphoneOn(boolean on); + + boolean isSpeakerphoneOn(); + + void setBluetoothScoOn(boolean on); + + boolean isBluetoothScoOn(); } diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java index d75d81d..1570db4 100644 --- a/media/java/android/media/JetPlayer.java +++ b/media/java/android/media/JetPlayer.java @@ -89,7 +89,7 @@ public class JetPlayer // Jet rendering audio parameters private static final int JET_OUTPUT_RATE = 22050; // _SAMPLE_RATE_22050 in Android.mk private static final int JET_OUTPUT_CHANNEL_CONFIG = - AudioFormat.CHANNEL_CONFIGURATION_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk + AudioFormat.CHANNEL_OUT_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk //-------------------------------------------- diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java index 8be11df..8180a7c 100644 --- a/media/java/android/media/MediaFile.java +++ b/media/java/android/media/MediaFile.java @@ -17,6 +17,7 @@ package android.media; import android.content.ContentValues; +import android.os.SystemProperties; import android.provider.MediaStore.Audio; import android.provider.MediaStore.Images; import android.provider.MediaStore.Video; @@ -41,8 +42,9 @@ public class MediaFile { public static final int FILE_TYPE_AWB = 5; public static final int FILE_TYPE_WMA = 6; public static final int FILE_TYPE_OGG = 7; + public static final int FILE_TYPE_AAC = 8; private static final int FIRST_AUDIO_FILE_TYPE = FILE_TYPE_MP3; - private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_OGG; + private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_AAC; // MIDI file types public static final int FILE_TYPE_MID = 11; @@ -57,8 +59,9 @@ public class MediaFile { public static final int FILE_TYPE_3GPP = 23; public static final int FILE_TYPE_3GPP2 = 24; public static final int FILE_TYPE_WMV = 25; + public static final int FILE_TYPE_ASF = 26; private static final int FIRST_VIDEO_FILE_TYPE = FILE_TYPE_MP4; - private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_WMV; + private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_ASF; // Image file types public static final int FILE_TYPE_JPEG = 
31; @@ -101,9 +104,12 @@ public class MediaFile { addFileType("WAV", FILE_TYPE_WAV, "audio/x-wav"); addFileType("AMR", FILE_TYPE_AMR, "audio/amr"); addFileType("AWB", FILE_TYPE_AWB, "audio/amr-wb"); - addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma"); + if (SystemProperties.getInt("ro.media.dec.aud.wma.enabled", 0) != 0) { + addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma"); + } addFileType("OGG", FILE_TYPE_OGG, "application/ogg"); addFileType("OGA", FILE_TYPE_OGG, "application/ogg"); + addFileType("AAC", FILE_TYPE_AAC, "audio/aac"); addFileType("MID", FILE_TYPE_MID, "audio/midi"); addFileType("MIDI", FILE_TYPE_MID, "audio/midi"); @@ -120,7 +126,10 @@ public class MediaFile { addFileType("3GPP", FILE_TYPE_3GPP, "video/3gpp"); addFileType("3G2", FILE_TYPE_3GPP2, "video/3gpp2"); addFileType("3GPP2", FILE_TYPE_3GPP2, "video/3gpp2"); - addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv"); + if (SystemProperties.getInt("ro.media.dec.vid.wmv.enabled", 0) != 0) { + addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv"); + addFileType("ASF", FILE_TYPE_ASF, "video/x-ms-asf"); + } addFileType("JPG", FILE_TYPE_JPEG, "image/jpeg"); addFileType("JPEG", FILE_TYPE_JPEG, "image/jpeg"); diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java index 3a49a5f..cecf4f8 100644 --- a/media/java/android/media/MediaMetadataRetriever.java +++ b/media/java/android/media/MediaMetadataRetriever.java @@ -35,6 +35,7 @@ public class MediaMetadataRetriever { static { System.loadLibrary("media_jni"); + native_init(); } // The field below is accessed by native methods @@ -211,7 +212,8 @@ public class MediaMetadataRetriever * allocated internally. */ public native void release(); - private native void native_setup(); + private native void native_setup(); + private static native void native_init(); private native final void native_finalize(); @@ -252,5 +254,6 @@ public class MediaMetadataRetriever public static final int METADATA_KEY_VIDEO_FORMAT = 18; public static final int METADATA_KEY_VIDEO_HEIGHT = 19; public static final int METADATA_KEY_VIDEO_WIDTH = 20; + public static final int METADATA_KEY_WRITER = 21; // Add more here... } diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java index 3b46d69..a676782 100644 --- a/media/java/android/media/MediaPlayer.java +++ b/media/java/android/media/MediaPlayer.java @@ -23,6 +23,7 @@ import android.net.Uri; import android.os.Handler; import android.os.Looper; import android.os.Message; +import android.os.Parcel; import android.os.ParcelFileDescriptor; import android.os.PowerManager; import android.util.Log; @@ -33,7 +34,7 @@ import android.media.AudioManager; import java.io.FileDescriptor; import java.io.IOException; - +import java.util.Set; import java.lang.ref.WeakReference; /** @@ -430,11 +431,49 @@ import java.lang.ref.WeakReference; */ public class MediaPlayer { + /** + Constant to retrieve only the new metadata since the last + call. + // FIXME: unhide. + // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean METADATA_UPDATE_ONLY = true; + + /** + Constant to retrieve all the metadata. + // FIXME: unhide. + // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean METADATA_ALL = false; + + /** + Constant to enable the metadata filter during retrieval. + // FIXME: unhide. 
+ // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean APPLY_METADATA_FILTER = true; + + /** + Constant to disable the metadata filter during retrieval. + // FIXME: unhide. + // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean BYPASS_METADATA_FILTER = false; + static { System.loadLibrary("media_jni"); + native_init(); } private final static String TAG = "MediaPlayer"; + // Name of the remote interface for the media player. Must be kept + // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE + // macro invocation in IMediaPlayer.cpp + private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer"; private int mNativeContext; // accessed by native methods private int mListenerContext; // accessed by native methods @@ -475,6 +514,45 @@ public class MediaPlayer private native void _setVideoSurface(); /** + * Create a request parcel which can be routed to the native media + * player using {@link #invoke(Parcel, Parcel)}. The Parcel + * returned has the proper InterfaceToken set. The caller should + * not overwrite that token, i.e it can only append data to the + * Parcel. + * + * @return A parcel suitable to hold a request for the native + * player. + * {@hide} + */ + public Parcel newRequest() { + Parcel parcel = Parcel.obtain(); + parcel.writeInterfaceToken(IMEDIA_PLAYER); + return parcel; + } + + /** + * Invoke a generic method on the native player using opaque + * parcels for the request and reply. Both payloads' format is a + * convention between the java caller and the native player. + * Must be called after setDataSource to make sure a native player + * exists. + * + * @param request Parcel with the data for the extension. The + * caller must use {@link #newRequest()} to get one. + * + * @param reply Output parcel with the data returned by the + * native player. + * + * @return The status code see utils/Errors.h + * {@hide} + */ + public int invoke(Parcel request, Parcel reply) { + int retcode = native_invoke(request, reply); + reply.setDataPosition(0); + return retcode; + } + + /** * Sets the SurfaceHolder to use for displaying the video portion of the media. * This call is optional. Not calling it when playing back a video will * result in only the audio track being played. @@ -838,6 +916,89 @@ public class MediaPlayer public native int getDuration(); /** + * Gets the media metadata. + * + * @param update_only controls whether the full set of available + * metadata is returned or just the set that changed since the + * last call. See {@see #METADATA_UPDATE_ONLY} and {@see + * #METADATA_ALL}. + * + * @param apply_filter if true only metadata that matches the + * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see + * #BYPASS_METADATA_FILTER}. + * + * @return The metadata, possibly empty. null if an error occured. + // FIXME: unhide. + * {@hide} + */ + public Metadata getMetadata(final boolean update_only, + final boolean apply_filter) { + Parcel reply = Parcel.obtain(); + Metadata data = new Metadata(); + + if (!native_getMetadata(update_only, apply_filter, reply)) { + reply.recycle(); + return null; + } + + // Metadata takes over the parcel, don't recycle it unless + // there is an error. + if (!data.parse(reply)) { + reply.recycle(); + return null; + } + return data; + } + + /** + * Set a filter for the metadata update notification and update + * retrieval. The caller provides 2 set of metadata keys, allowed + * and blocked. 
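The newRequest()/invoke() pair added above gives platform code a generic escape hatch into the native player; both methods are still @hide in this snapshot and the request payload format is a private convention between caller and native side. A minimal sketch of the round trip, where HYPOTHETICAL_EXTENSION is purely an illustrative code and not part of the real protocol:

    import android.media.MediaPlayer;
    import android.os.Parcel;

    // Sketch only: assumes setDataSource() has already been called on the player.
    class InvokeSketch {
        static final int HYPOTHETICAL_EXTENSION = 1;   // illustrative, not a real opcode

        static int callExtension(MediaPlayer player) {
            Parcel request = player.newRequest();       // interface token already written
            Parcel reply = Parcel.obtain();
            try {
                request.writeInt(HYPOTHETICAL_EXTENSION); // payload layout is a private convention
                return player.invoke(request, reply);     // status code, see utils/Errors.h
            } finally {
                request.recycle();
                reply.recycle();
            }
        }
    }
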
The blocked set always takes precedence over the + * allowed one. + * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as + * shorthands to allow/block all or no metadata. + * + * By default, there is no filter set. + * + * @param allow Is the set of metadata the client is interested + * in receiving new notifications for. + * @param block Is the set of metadata the client is not interested + * in receiving new notifications for. + * @return The call status code. + * + // FIXME: unhide. + * {@hide} + */ + public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) { + // Do our serialization manually instead of calling + // Parcel.writeArray since the sets are made of the same type + // we avoid paying the price of calling writeValue (used by + // writeArray) which burns an extra int per element to encode + // the type. + Parcel request = newRequest(); + + // The parcel starts already with an interface token. There + // are 2 filters. Each one starts with a 4bytes number to + // store the len followed by a number of int (4 bytes as well) + // representing the metadata type. + int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size()); + + if (request.dataCapacity() < capacity) { + request.setDataCapacity(capacity); + } + + request.writeInt(allow.size()); + for(Integer t: allow) { + request.writeInt(t); + } + request.writeInt(block.size()); + for(Integer t: block) { + request.writeInt(t); + } + return native_setMetadataFilter(request); + } + + /** * Releases resources associated with this MediaPlayer object. * It is considered good practice to call this method when you're * done using the MediaPlayer. @@ -915,8 +1076,46 @@ public class MediaPlayer */ public native Bitmap getFrameAt(int msec) throws IllegalStateException; + /** + * @param request Parcel destinated to the media player. The + * Interface token must be set to the IMediaPlayer + * one to be routed correctly through the system. + * @param reply[out] Parcel that will contain the reply. + * @return The status code. + */ + private native final int native_invoke(Parcel request, Parcel reply); + + + /** + * @param update_only If true fetch only the set of metadata that have + * changed since the last invocation of getMetadata. + * The set is built using the unfiltered + * notifications the native player sent to the + * MediaPlayerService during that period of + * time. If false, all the metadatas are considered. + * @param apply_filter If true, once the metadata set has been built based on + * the value update_only, the current filter is applied. + * @param reply[out] On return contains the serialized + * metadata. Valid only if the call was successful. + * @return The status code. + */ + private native final boolean native_getMetadata(boolean update_only, + boolean apply_filter, + Parcel reply); + + /** + * @param request Parcel with the 2 serialized lists of allowed + * metadata types followed by the one to be + * dropped. Each list starts with an integer + * indicating the number of metadata type elements. + * @return The status code. + */ + private native final int native_setMetadataFilter(Parcel request); + + private static native final void native_init(); private native final void native_setup(Object mediaplayer_this); private native final void native_finalize(); + @Override protected void finalize() { native_finalize(); } @@ -1254,6 +1453,11 @@ public class MediaPlayer */ public static final int MEDIA_INFO_NOT_SEEKABLE = 801; + /** A new set of metadata is available. 
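Since getMetadata() and setMetadataFilter() above are still hidden in this snapshot, the following is only a sketch of the intended calling sequence, using the constants declared earlier in the class and the accessors of the new Metadata class:

    import android.media.MediaPlayer;
    import android.media.Metadata;

    // Sketch: allow every key, then read one well-known entry.
    class MetadataSketch {
        static String title(MediaPlayer player) {
            // MATCH_ALL / MATCH_NONE are the filter shorthands defined in Metadata.
            player.setMetadataFilter(Metadata.MATCH_ALL, Metadata.MATCH_NONE);
            Metadata meta = player.getMetadata(MediaPlayer.METADATA_ALL,
                                               MediaPlayer.BYPASS_METADATA_FILTER);
            if (meta == null || !meta.has(Metadata.TITLE)) {
                return null;                             // no metadata or no title key
            }
            return meta.getString(Metadata.TITLE);
        }
    }
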
+ * @see android.media.MediaPlayer.OnInfoListener + */ + public static final int MEDIA_INFO_METADATA_UPDATE = 802; + /** * Interface definition of a callback to be invoked to communicate some * info and/or warning about the media or its playback. @@ -1270,6 +1474,7 @@ public class MediaPlayer * <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING} * <li>{@link #MEDIA_INFO_BAD_INTERLEAVING} * <li>{@link #MEDIA_INFO_NOT_SEEKABLE} + * <li>{@link #MEDIA_INFO_METADATA_UPDATE} * </ul> * @param extra an extra code, specific to the info. Typically * implementation dependant. @@ -1291,4 +1496,9 @@ public class MediaPlayer } private OnInfoListener mOnInfoListener; + + /** + * @hide + */ + public native static int snoop(short [] outData, int kind); } diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java index be4b489..4203cba 100644 --- a/media/java/android/media/MediaRecorder.java +++ b/media/java/android/media/MediaRecorder.java @@ -57,6 +57,7 @@ public class MediaRecorder { static { System.loadLibrary("media_jni"); + native_init(); } private final static String TAG = "MediaRecorder"; @@ -134,6 +135,12 @@ public class MediaRecorder /** Voice call uplink + downlink audio source */ public static final int VOICE_CALL = 4; + + /** Microphone audio source with same orientation as camera */ + public static final int CAMCORDER = 5; + + /** Microphone audio source tuned for voice recognition */ + public static final int VOICE_RECOGNITION = 6; } /** @@ -167,7 +174,7 @@ public class MediaRecorder /** The following formats are audio only .aac or .amr formats **/ /** @deprecated Deprecated in favor of AMR_NB */ - /** @todo change link when AMR_NB is exposed. Deprecated in favor of {@link MediaRecorder.OutputFormat#AMR_NB} */ + /** TODO: change link when AMR_NB is exposed. Deprecated in favor of MediaRecorder.OutputFormat.AMR_NB */ public static final int RAW_AMR = 3; /** @hide AMR NB file format */ public static final int AMR_NB = 3; @@ -273,7 +280,7 @@ public class MediaRecorder * Gets the maximum value for audio sources. * @see android.media.MediaRecorder.AudioSource */ - public static final int getAudioSourceMax() { return AudioSource.VOICE_CALL; } + public static final int getAudioSourceMax() { return AudioSource.VOICE_RECOGNITION; } /** * Sets the video source to be used for recording. If this method is not @@ -655,6 +662,8 @@ public class MediaRecorder */ public native void release(); + private static native final void native_init(); + private native final void native_setup(Object mediarecorder_this) throws IllegalStateException; private native final void native_finalize(); diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java index 8db874a..afdc8f7 100644 --- a/media/java/android/media/MediaScanner.java +++ b/media/java/android/media/MediaScanner.java @@ -99,6 +99,7 @@ public class MediaScanner { static { System.loadLibrary("media_jni"); + native_init(); } private final static String TAG = "MediaScanner"; @@ -307,10 +308,14 @@ public class MediaScanner private boolean mDefaultRingtoneSet; /** Whether the scanner has set a default sound for the notification ringtone. */ private boolean mDefaultNotificationSet; + /** Whether the scanner has set a default sound for the alarm ringtone. */ + private boolean mDefaultAlarmSet; /** The filename for the default sound for the ringer ringtone. */ private String mDefaultRingtoneFilename; /** The filename for the default sound for the notification ringtone. 
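The MediaRecorder hunk above adds two microphone paths, CAMCORDER and VOICE_RECOGNITION, and bumps getAudioSourceMax() accordingly. A typical recording setup using the new voice-recognition source might look like this minimal sketch (the output path is an assumption, not anything from the patch):

    import android.media.MediaRecorder;
    import java.io.IOException;

    // Sketch: record speech through the microphone path tuned for voice recognition.
    class VoiceRecordingSketch {
        static MediaRecorder start(String outputPath) throws IOException {
            MediaRecorder recorder = new MediaRecorder();
            recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_RECOGNITION);
            recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            recorder.setOutputFile(outputPath);   // illustrative path supplied by the caller
            recorder.prepare();
            recorder.start();
            return recorder;                      // caller stops and releases when done
        }
    }
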
*/ private String mDefaultNotificationFilename; + /** The filename for the default sound for the alarm ringtone. */ + private String mDefaultAlarmAlertFilename; /** * The prefix for system properties that define the default sound for * ringtones. Concatenate the name of the setting from Settings @@ -369,6 +374,8 @@ public class MediaScanner + Settings.System.RINGTONE); mDefaultNotificationFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX + Settings.System.NOTIFICATION_SOUND); + mDefaultAlarmAlertFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX + + Settings.System.ALARM_ALERT); } private MyMediaScannerClient mClient = new MyMediaScannerClient(); @@ -389,6 +396,7 @@ public class MediaScanner private String mPath; private long mLastModified; private long mFileSize; + private String mWriter; public FileCacheEntry beginFile(String path, String mimeType, long lastModified, long fileSize) { @@ -472,11 +480,14 @@ public class MediaScanner mDuration = 0; mPath = path; mLastModified = lastModified; + mWriter = null; return entry; } public void scanFile(String path, long lastModified, long fileSize) { + // This is the callback funtion from native codes. + // Log.v(TAG, "scanFile: "+path); doScanFile(path, null, lastModified, fileSize, false); } @@ -484,6 +495,22 @@ public class MediaScanner doScanFile(path, mimeType, lastModified, fileSize, false); } + private boolean isMetadataSupported(int fileType) { + if (mFileType == MediaFile.FILE_TYPE_MP3 || + mFileType == MediaFile.FILE_TYPE_MP4 || + mFileType == MediaFile.FILE_TYPE_M4A || + mFileType == MediaFile.FILE_TYPE_3GPP || + mFileType == MediaFile.FILE_TYPE_3GPP2 || + mFileType == MediaFile.FILE_TYPE_OGG || + mFileType == MediaFile.FILE_TYPE_AAC || + mFileType == MediaFile.FILE_TYPE_MID || + mFileType == MediaFile.FILE_TYPE_WMA) { + // we only extract metadata from MP3, M4A, OGG, MID, AAC and WMA files. + // check MP4 files, to determine if they contain only audio. + return true; + } + return false; + } public Uri doScanFile(String path, String mimeType, long lastModified, long fileSize, boolean scanAlways) { Uri result = null; // long t1 = System.currentTimeMillis(); @@ -499,16 +526,7 @@ public class MediaScanner boolean music = (lowpath.indexOf(MUSIC_DIR) > 0) || (!ringtones && !notifications && !alarms && !podcasts); - if (mFileType == MediaFile.FILE_TYPE_MP3 || - mFileType == MediaFile.FILE_TYPE_MP4 || - mFileType == MediaFile.FILE_TYPE_M4A || - mFileType == MediaFile.FILE_TYPE_3GPP || - mFileType == MediaFile.FILE_TYPE_3GPP2 || - mFileType == MediaFile.FILE_TYPE_OGG || - mFileType == MediaFile.FILE_TYPE_MID || - mFileType == MediaFile.FILE_TYPE_WMA) { - // we only extract metadata from MP3, M4A, OGG, MID and WMA files. - // check MP4 files, to determine if they contain only audio. 
+ if( isMetadataSupported(mFileType) ) { processFile(path, mimeType, this); } else if (MediaFile.isImageFileType(mFileType)) { // we used to compute the width and height but it's not worth it @@ -586,10 +604,19 @@ public class MediaScanner mTrack = (num * 1000) + (mTrack % 1000); } else if (name.equalsIgnoreCase("duration")) { mDuration = parseSubstring(value, 0, 0); + } else if (name.equalsIgnoreCase("writer") || name.startsWith("writer;")) { + mWriter = value.trim(); } } public void setMimeType(String mimeType) { + if ("audio/mp4".equals(mMimeType) && + mimeType.startsWith("video")) { + // for feature parity with Donut, we force m4a files to keep the + // audio/mp4 mimetype, even if they are really "enhanced podcasts" + // with a video track + return; + } mMimeType = mimeType; mFileType = MediaFile.getFileTypeForMimeType(mimeType); } @@ -701,14 +728,45 @@ public class MediaScanner values.put(Audio.Media.IS_MUSIC, music); values.put(Audio.Media.IS_PODCAST, podcasts); } else if (mFileType == MediaFile.FILE_TYPE_JPEG) { - HashMap<String, String> exifData = - ExifInterface.loadExifData(entry.mPath); - if (exifData != null) { - float[] latlng = ExifInterface.getLatLng(exifData); - if (latlng != null) { + ExifInterface exif = null; + try { + exif = new ExifInterface(entry.mPath); + } catch (IOException ex) { + // exif is null + } + if (exif != null) { + float[] latlng = new float[2]; + if (exif.getLatLong(latlng)) { values.put(Images.Media.LATITUDE, latlng[0]); values.put(Images.Media.LONGITUDE, latlng[1]); } + + long time = exif.getDateTime(); + if (time != -1) { + values.put(Images.Media.DATE_TAKEN, time); + } + + int orientation = exif.getAttributeInt( + ExifInterface.TAG_ORIENTATION, -1); + if (orientation != -1) { + // We only recognize a subset of orientation tag values. 
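The scanner now uses the instance-based ExifInterface API instead of the old static loadExifData()/getLatLng() helpers; the switch that follows simply maps the recognized orientation values to degrees. A standalone sketch of the public calls involved (getDateTime(), used in the hunk, is left out here since it may not be part of the public surface at this API level):

    import android.media.ExifInterface;
    import java.io.IOException;

    // Sketch of the per-file EXIF reads the scanner performs for JPEGs.
    class ExifSketch {
        static void dump(String jpegPath) {
            ExifInterface exif;
            try {
                exif = new ExifInterface(jpegPath);
            } catch (IOException e) {
                return;                            // no readable EXIF data
            }
            float[] latlng = new float[2];
            if (exif.getLatLong(latlng)) {
                System.out.println("lat=" + latlng[0] + " lng=" + latlng[1]);
            }
            int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, -1);
            System.out.println("orientation=" + orientation);  // -1 when the tag is absent
        }
    }
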
+ int degree; + switch(orientation) { + case ExifInterface.ORIENTATION_ROTATE_90: + degree = 90; + break; + case ExifInterface.ORIENTATION_ROTATE_180: + degree = 180; + break; + case ExifInterface.ORIENTATION_ROTATE_270: + degree = 270; + break; + default: + degree = 0; + break; + } + values.put(Images.Media.ORIENTATION, degree); + } } } @@ -779,6 +837,12 @@ public class MediaScanner setSettingIfNotSet(Settings.System.RINGTONE, tableUri, rowId); mDefaultRingtoneSet = true; } + } else if (alarms && !mDefaultAlarmSet) { + if (TextUtils.isEmpty(mDefaultAlarmAlertFilename) || + doesPathHaveFilename(entry.mPath, mDefaultAlarmAlertFilename)) { + setSettingIfNotSet(Settings.System.ALARM_ALERT, tableUri, rowId); + mDefaultAlarmSet = true; + } } return result; @@ -803,6 +867,22 @@ public class MediaScanner } } + public void addNoMediaFolder(String path) { + ContentValues values = new ContentValues(); + values.put(MediaStore.Images.ImageColumns.DATA, ""); + String [] pathSpec = new String[] {path + '%'}; + try { + mMediaProvider.update(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values, + MediaStore.Images.ImageColumns.DATA + " LIKE ?", pathSpec); + mMediaProvider.update(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values, + MediaStore.Images.ImageColumns.DATA + " LIKE ?", pathSpec); + mMediaProvider.update(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, values, + MediaStore.Images.ImageColumns.DATA + " LIKE ?", pathSpec); + } catch (RemoteException e) { + throw new RuntimeException(); + } + } + }; // end of anonymous MediaScannerClient instance private void prescan(String filePath) throws RemoteException { @@ -1200,7 +1280,8 @@ public class MediaScanner } } - if (bestMatch == null) { + // if the match is not for an audio file, bail out + if (bestMatch == null || ! mAudioUri.equals(bestMatch.mTableUri)) { return false; } @@ -1412,6 +1493,7 @@ public class MediaScanner public native byte[] extractAlbumArt(FileDescriptor fd); + private static native final void native_init(); private native final void native_setup(); private native final void native_finalize(); @Override diff --git a/media/java/android/media/MediaScannerClient.java b/media/java/android/media/MediaScannerClient.java index cf1a8da..258c3b4 100644 --- a/media/java/android/media/MediaScannerClient.java +++ b/media/java/android/media/MediaScannerClient.java @@ -25,11 +25,13 @@ public interface MediaScannerClient public void scanFile(String path, String mimeType, long lastModified, long fileSize); + public void addNoMediaFolder(String path); + /** * Called by native code to return metadata extracted from media files. */ public void handleStringTag(String name, String value); - + /** * Called by native code to return mime type extracted from DRM content. */ diff --git a/media/java/android/media/Metadata.java b/media/java/android/media/Metadata.java new file mode 100644 index 0000000..bd25da2 --- /dev/null +++ b/media/java/android/media/Metadata.java @@ -0,0 +1,418 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.media; + +import android.graphics.Bitmap; +import android.os.Parcel; +import android.util.Log; + +import java.util.Calendar; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Set; +import java.util.TimeZone; + + +/** + Class to hold the media's metadata. Metadata are used + for human consumption and can be embedded in the media (e.g + shoutcast) or available from an external source. The source can be + local (e.g thumbnail stored in the DB) or remote (e.g caption + server). + + Metadata is like a Bundle. It is sparse and each key can occur at + most once. The key is an integer and the value is the actual metadata. + + The caller is expected to know the type of the metadata and call + the right get* method to fetch its value. + + // FIXME: unhide. + {@hide} + */ +public class Metadata +{ + // The metadata are keyed using integers rather than more heavy + // weight strings. We considered using Bundle to ship the metadata + // between the native layer and the java layer but dropped that + // option since keeping in sync a native implementation of Bundle + // and the java one would be too burdensome. Besides Bundle uses + // String for its keys. + // The key range [0 8192) is reserved for the system. + // + // We manually serialize the data in Parcels. For large memory + // blob (bitmaps, raw pictures) we use MemoryFile which allow the + // client to make the data purge-able once it is done with it. + // + + public static final int ANY = 0; // Never used for metadata returned, only for filtering. + // Keep in sync with kAny in MediaPlayerService.cpp + + // TODO: Should we use numbers compatible with the metadata retriever? + public static final int TITLE = 1; // String + public static final int COMMENT = 2; // String + public static final int COPYRIGHT = 3; // String + public static final int ALBUM = 4; // String + public static final int ARTIST = 5; // String + public static final int AUTHOR = 6; // String + public static final int COMPOSER = 7; // String + public static final int GENRE = 8; // String + public static final int DATE = 9; // Date + public static final int DURATION = 10; // Integer(millisec) + public static final int CD_TRACK_NUM = 11; // Integer 1-based + public static final int CD_TRACK_MAX = 12; // Integer + public static final int RATING = 13; // String + public static final int ALBUM_ART = 14; // byte[] + public static final int VIDEO_FRAME = 15; // Bitmap + public static final int CAPTION = 16; // TimedText + + public static final int BIT_RATE = 17; // Integer, Aggregate rate of + // all the streams in bps. + + public static final int AUDIO_BIT_RATE = 18; // Integer, bps + public static final int VIDEO_BIT_RATE = 19; // Integer, bps + public static final int AUDIO_SAMPLE_RATE = 20; // Integer, Hz + public static final int VIDEO_FRAME_RATE = 21; // Integer, Hz + + // See RFC2046 and RFC4281. + public static final int MIME_TYPE = 22; // String + public static final int AUDIO_CODEC = 23; // String + public static final int VIDEO_CODEC = 24; // String + + public static final int VIDEO_HEIGHT = 25; // Integer + public static final int VIDEO_WIDTH = 26; // Integer + public static final int NUM_TRACKS = 27; // Integer + public static final int DRM_CRIPPLED = 28; // Boolean + + // Playback capabilities. 
+ public static final int PAUSE_AVAILABLE = 29; // Boolean + public static final int SEEK_BACKWARD_AVAILABLE = 30; // Boolean + public static final int SEEK_FORWARD_AVAILABLE = 31; // Boolean + + private static final int LAST_SYSTEM = 31; + private static final int FIRST_CUSTOM = 8192; + + // Shorthands to set the MediaPlayer's metadata filter. + public static final Set<Integer> MATCH_NONE = Collections.EMPTY_SET; + public static final Set<Integer> MATCH_ALL = Collections.singleton(ANY); + + public static final int STRING_VAL = 1; + public static final int INTEGER_VAL = 2; + public static final int BOOLEAN_VAL = 3; + public static final int LONG_VAL = 4; + public static final int DOUBLE_VAL = 5; + public static final int TIMED_TEXT_VAL = 6; + public static final int DATE_VAL = 7; + public static final int BYTE_ARRAY_VAL = 8; + // FIXME: misses a type for shared heap is missing (MemoryFile). + // FIXME: misses a type for bitmaps. + private static final int LAST_TYPE = 8; + + private static final String TAG = "media.Metadata"; + private static final int kInt32Size = 4; + private static final int kMetaHeaderSize = 2 * kInt32Size; // size + marker + private static final int kRecordHeaderSize = 3 * kInt32Size; // size + id + type + + private static final int kMetaMarker = 0x4d455441; // 'M' 'E' 'T' 'A' + + // After a successful parsing, set the parcel with the serialized metadata. + private Parcel mParcel; + + // Map to associate a Metadata key (e.g TITLE) with the offset of + // the record's payload in the parcel. + // Used to look up if a key was present too. + // Key: Metadata ID + // Value: Offset of the metadata type field in the record. + private final HashMap<Integer, Integer> mKeyToPosMap = + new HashMap<Integer, Integer>(); + + /** + * Helper class to hold a triple (time, duration, text). Can be used to + * implement caption. + */ + public class TimedText { + private Date mTime; + private int mDuration; // millisec + private String mText; + + public TimedText(Date time, int duration, String text) { + mTime = time; + mDuration = duration; + mText = text; + } + + public String toString() { + StringBuilder res = new StringBuilder(80); + res.append(mTime).append("-").append(mDuration) + .append(":").append(mText); + return res.toString(); + } + } + + public Metadata() { } + + /** + * Go over all the records, collecting metadata keys and records' + * type field offset in the Parcel. These are stored in + * mKeyToPosMap for latter retrieval. + * Format of a metadata record: + <pre> + 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | record size | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | metadata key | // TITLE + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | metadata type | // STRING_VAL + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | + | .... metadata payload .... | + | | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + </pre> + * @param parcel With the serialized records. + * @param bytesLeft How many bytes in the parcel should be processed. + * @return false if an error occurred during parsing. + */ + private boolean scanAllRecords(Parcel parcel, int bytesLeft) { + int recCount = 0; + boolean error = false; + + mKeyToPosMap.clear(); + while (bytesLeft > kRecordHeaderSize) { + final int start = parcel.dataPosition(); + // Check the size. 
+ final int size = parcel.readInt(); + + if (size <= kRecordHeaderSize) { // at least 1 byte should be present. + Log.e(TAG, "Record is too short"); + error = true; + break; + } + + // Check the metadata key. + final int metadataId = parcel.readInt(); + if (!checkMetadataId(metadataId)) { + error = true; + break; + } + + // Store the record offset which points to the type + // field so we can later on read/unmarshall the record + // payload. + if (mKeyToPosMap.containsKey(metadataId)) { + Log.e(TAG, "Duplicate metadata ID found"); + error = true; + break; + } + + mKeyToPosMap.put(metadataId, parcel.dataPosition()); + + // Check the metadata type. + final int metadataType = parcel.readInt(); + if (metadataType <= 0 || metadataType > LAST_TYPE) { + Log.e(TAG, "Invalid metadata type " + metadataType); + error = true; + break; + } + + // Skip to the next one. + parcel.setDataPosition(start + size); + bytesLeft -= size; + ++recCount; + } + + if (0 != bytesLeft || error) { + Log.e(TAG, "Ran out of data or error on record " + recCount); + mKeyToPosMap.clear(); + return false; + } else { + return true; + } + } + + /** + * Check a parcel containing metadata is well formed. The header + * is checked as well as the individual records format. However, the + * data inside the record is not checked because we do lazy access + * (we check/unmarshall only data the user asks for.) + * + * Format of a metadata parcel: + <pre> + 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | metadata total size | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | 'M' | 'E' | 'T' | 'A' | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | + | .... metadata records .... | + | | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + </pre> + * + * @param parcel With the serialized data. Metadata keeps a + * reference on it to access it later on. The caller + * should not modify the parcel after this call (and + * not call recycle on it.) + * @return false if an error occurred. + */ + public boolean parse(Parcel parcel) { + if (parcel.dataAvail() < kMetaHeaderSize) { + Log.e(TAG, "Not enough data " + parcel.dataAvail()); + return false; + } + + final int pin = parcel.dataPosition(); // to roll back in case of errors. + final int size = parcel.readInt(); + + // The extra kInt32Size below is to account for the int32 'size' just read. + if (parcel.dataAvail() + kInt32Size < size || size < kMetaHeaderSize) { + Log.e(TAG, "Bad size " + size + " avail " + parcel.dataAvail() + " position " + pin); + parcel.setDataPosition(pin); + return false; + } + + // Checks if the 'M' 'E' 'T' 'A' marker is present. + final int kShouldBeMetaMarker = parcel.readInt(); + if (kShouldBeMetaMarker != kMetaMarker ) { + Log.e(TAG, "Marker missing " + Integer.toHexString(kShouldBeMetaMarker)); + parcel.setDataPosition(pin); + return false; + } + + // Scan the records to collect metadata ids and offsets. + if (!scanAllRecords(parcel, size - kMetaHeaderSize)) { + parcel.setDataPosition(pin); + return false; + } + mParcel = parcel; + return true; + } + + /** + * @return The set of metadata ID found. + */ + public Set<Integer> keySet() { + return mKeyToPosMap.keySet(); + } + + /** + * @return true if a value is present for the given key. 
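Given the header and record layouts documented above for parse() and scanAllRecords(), a test-style sketch of a parcel that parse() should accept looks like this; the size fields are back-patched once the payload length is known. Metadata is still @hide, so this is platform-side test code at best:

    import android.media.Metadata;
    import android.os.Parcel;

    // Sketch: serialize a single TITLE record the way the native side is expected to.
    class MetadataParcelSketch {
        static Metadata singleTitle(String title) {
            Parcel p = Parcel.obtain();
            p.writeInt(0);                         // total size, back-patched below
            p.writeInt(0x4d455441);                // 'M' 'E' 'T' 'A' marker
            int recordStart = p.dataPosition();
            p.writeInt(0);                         // record size, back-patched below
            p.writeInt(Metadata.TITLE);            // metadata key
            p.writeInt(Metadata.STRING_VAL);       // metadata type
            p.writeString(title);                  // payload
            int end = p.dataPosition();
            p.setDataPosition(recordStart);
            p.writeInt(end - recordStart);         // record size includes its own header
            p.setDataPosition(0);
            p.writeInt(end);                       // total size includes the 8-byte header
            p.setDataPosition(0);

            Metadata m = new Metadata();
            return m.parse(p) ? m : null;          // on success, getString(Metadata.TITLE) returns title
        }
    }
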
+ */ + public boolean has(final int metadataId) { + if (!checkMetadataId(metadataId)) { + throw new IllegalArgumentException("Invalid key: " + metadataId); + } + return mKeyToPosMap.containsKey(metadataId); + } + + // Accessors. + // Caller must make sure the key is present using the {@code has} + // method otherwise a RuntimeException will occur. + + public String getString(final int key) { + checkType(key, STRING_VAL); + return mParcel.readString(); + } + + public int getInt(final int key) { + checkType(key, INTEGER_VAL); + return mParcel.readInt(); + } + + public boolean getBoolean(final int key) { + checkType(key, BOOLEAN_VAL); + return mParcel.readInt() == 1; + } + + public long getLong(final int key) { + checkType(key, LONG_VAL); + return mParcel.readLong(); + } + + public double getDouble(final int key) { + checkType(key, DOUBLE_VAL); + return mParcel.readDouble(); + } + + public byte[] getByteArray(final int key) { + checkType(key, BYTE_ARRAY_VAL); + return mParcel.createByteArray(); + } + + public Date getDate(final int key) { + checkType(key, DATE_VAL); + final long timeSinceEpoch = mParcel.readLong(); + final String timeZone = mParcel.readString(); + + if (timeZone.length() == 0) { + return new Date(timeSinceEpoch); + } else { + TimeZone tz = TimeZone.getTimeZone(timeZone); + Calendar cal = Calendar.getInstance(tz); + + cal.setTimeInMillis(timeSinceEpoch); + return cal.getTime(); + } + } + + public TimedText getTimedText(final int key) { + checkType(key, TIMED_TEXT_VAL); + final Date startTime = new Date(mParcel.readLong()); // epoch + final int duration = mParcel.readInt(); // millisec + + return new TimedText(startTime, + duration, + mParcel.readString()); + } + + // @return the last available system metadata id. Ids are + // 1-indexed. + public static int lastSytemId() { return LAST_SYSTEM; } + + // @return the first available cutom metadata id. + public static int firstCustomId() { return FIRST_CUSTOM; } + + // @return the last value of known type. Types are 1-indexed. + public static int lastType() { return LAST_TYPE; } + + // Check val is either a system id or a custom one. + // @param val Metadata key to test. + // @return true if it is in a valid range. + private boolean checkMetadataId(final int val) { + if (val <= ANY || (LAST_SYSTEM < val && val < FIRST_CUSTOM)) { + Log.e(TAG, "Invalid metadata ID " + val); + return false; + } + return true; + } + + // Check the type of the data match what is expected. + private void checkType(final int key, final int expectedType) { + final int pos = mKeyToPosMap.get(key); + + mParcel.setDataPosition(pos); + + final int type = mParcel.readInt(); + if (type != expectedType) { + throw new IllegalStateException("Wrong type " + expectedType + " but got " + type); + } + } +} diff --git a/media/java/android/media/MiniThumbFile.java b/media/java/android/media/MiniThumbFile.java new file mode 100644 index 0000000..f6e6317 --- /dev/null +++ b/media/java/android/media/MiniThumbFile.java @@ -0,0 +1,274 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.media; + +import android.graphics.Bitmap; +import android.media.ThumbnailUtil; +import android.net.Uri; +import android.os.Environment; +import android.util.Log; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; +import java.util.Hashtable; + +/** + * This class handles the mini-thumb file. A mini-thumb file consists + * of blocks, indexed by id. Each block has BYTES_PER_MINTHUMB bytes in the + * following format: + * + * 1 byte status (0 = empty, 1 = mini-thumb available) + * 8 bytes magic (a magic number to match what's in the database) + * 4 bytes data length (LEN) + * LEN bytes jpeg data + * (the remaining bytes are unused) + * + * @hide This file is shared between MediaStore and MediaProvider and should remained internal use + * only. + */ +public class MiniThumbFile { + private static final String TAG = "MiniThumbFile"; + private static final int MINI_THUMB_DATA_FILE_VERSION = 3; + public static final int BYTES_PER_MINTHUMB = 10000; + private static final int HEADER_SIZE = 1 + 8 + 4; + private Uri mUri; + private RandomAccessFile mMiniThumbFile; + private FileChannel mChannel; + private ByteBuffer mBuffer; + private static Hashtable<String, MiniThumbFile> sThumbFiles = + new Hashtable<String, MiniThumbFile>(); + + /** + * We store different types of thumbnails in different files. To remain backward compatibility, + * we should hashcode of content://media/external/images/media remains the same. + */ + public static synchronized void reset() { + for (MiniThumbFile file : sThumbFiles.values()) { + file.deactivate(); + } + sThumbFiles.clear(); + } + + public static synchronized MiniThumbFile instance(Uri uri) { + String type = uri.getPathSegments().get(1); + MiniThumbFile file = sThumbFiles.get(type); + // Log.v(TAG, "get minithumbfile for type: "+type); + if (file == null) { + file = new MiniThumbFile( + Uri.parse("content://media/external/" + type + "/media")); + sThumbFiles.put(type, file); + } + + return file; + } + + private String randomAccessFilePath(int version) { + String directoryName = + Environment.getExternalStorageDirectory().toString() + + "/DCIM/.thumbnails"; + return directoryName + "/.thumbdata" + version + "-" + mUri.hashCode(); + } + + private void removeOldFile() { + String oldPath = randomAccessFilePath(MINI_THUMB_DATA_FILE_VERSION - 1); + File oldFile = new File(oldPath); + if (oldFile.exists()) { + try { + oldFile.delete(); + } catch (SecurityException ex) { + // ignore + } + } + } + + private RandomAccessFile miniThumbDataFile() { + if (mMiniThumbFile == null) { + removeOldFile(); + String path = randomAccessFilePath(MINI_THUMB_DATA_FILE_VERSION); + File directory = new File(path).getParentFile(); + if (!directory.isDirectory()) { + if (!directory.mkdirs()) { + Log.e(TAG, "Unable to create .thumbnails directory " + + directory.toString()); + } + } + File f = new File(path); + try { + mMiniThumbFile = new RandomAccessFile(f, "rw"); + } catch (IOException ex) { + // Open as read-only so we can at least read the existing + // thumbnails. 
+ try { + mMiniThumbFile = new RandomAccessFile(f, "r"); + } catch (IOException ex2) { + // ignore exception + } + } + if (mMiniThumbFile != null) { + mChannel = mMiniThumbFile.getChannel(); + } + } + return mMiniThumbFile; + } + + public MiniThumbFile(Uri uri) { + mUri = uri; + mBuffer = ByteBuffer.allocateDirect(BYTES_PER_MINTHUMB); + } + + public synchronized void deactivate() { + if (mMiniThumbFile != null) { + try { + mMiniThumbFile.close(); + mMiniThumbFile = null; + } catch (IOException ex) { + // ignore exception + } + } + } + + // Get the magic number for the specified id in the mini-thumb file. + // Returns 0 if the magic is not available. + public synchronized long getMagic(long id) { + // check the mini thumb file for the right data. Right is + // defined as having the right magic number at the offset + // reserved for this "id". + RandomAccessFile r = miniThumbDataFile(); + if (r != null) { + long pos = id * BYTES_PER_MINTHUMB; + FileLock lock = null; + try { + mBuffer.clear(); + mBuffer.limit(1 + 8); + + lock = mChannel.lock(pos, 1 + 8, true); + // check that we can read the following 9 bytes + // (1 for the "status" and 8 for the long) + if (mChannel.read(mBuffer, pos) == 9) { + mBuffer.position(0); + if (mBuffer.get() == 1) { + return mBuffer.getLong(); + } + } + } catch (IOException ex) { + Log.v(TAG, "Got exception checking file magic: ", ex); + } catch (RuntimeException ex) { + // Other NIO related exception like disk full, read only channel..etc + Log.e(TAG, "Got exception when reading magic, id = " + id + + ", disk full or mount read-only? " + ex.getClass()); + } finally { + try { + if (lock != null) lock.release(); + } + catch (IOException ex) { + // ignore it. + } + } + } + return 0; + } + + public synchronized void saveMiniThumbToFile(byte[] data, long id, long magic) + throws IOException { + RandomAccessFile r = miniThumbDataFile(); + if (r == null) return; + + long pos = id * BYTES_PER_MINTHUMB; + FileLock lock = null; + try { + if (data != null) { + if (data.length > BYTES_PER_MINTHUMB - HEADER_SIZE) { + // not enough space to store it. + return; + } + mBuffer.clear(); + mBuffer.put((byte) 1); + mBuffer.putLong(magic); + mBuffer.putInt(data.length); + mBuffer.put(data); + mBuffer.flip(); + + lock = mChannel.lock(pos, BYTES_PER_MINTHUMB, false); + mChannel.write(mBuffer, pos); + } + } catch (IOException ex) { + Log.e(TAG, "couldn't save mini thumbnail data for " + + id + "; ", ex); + throw ex; + } catch (RuntimeException ex) { + // Other NIO related exception like disk full, read only channel..etc + Log.e(TAG, "couldn't save mini thumbnail data for " + + id + "; disk full or mount read-only? " + ex.getClass()); + } finally { + try { + if (lock != null) lock.release(); + } + catch (IOException ex) { + // ignore it. + } + } + } + + /** + * Gallery app can use this method to retrieve mini-thumbnail. Full size + * images share the same IDs with their corresponding thumbnails. + * + * @param id the ID of the image (same of full size image). + * @param data the buffer to store mini-thumbnail. 
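With getMagic() and saveMiniThumbToFile() above, plus getMiniThumbFromFile() completed just below, a hypothetical read path through this class looks like the following sketch; MiniThumbFile is @hide and shared only between MediaStore and MediaProvider, and the image id here is illustrative:

    import android.media.MiniThumbFile;
    import android.net.Uri;

    // Sketch: look up the mini-thumb block for one image id.
    class MiniThumbSketch {
        static byte[] readThumb(long imageId) {
            Uri images = Uri.parse("content://media/external/images/media");
            MiniThumbFile file = MiniThumbFile.instance(images);
            if (file.getMagic(imageId) == 0) {
                return null;                       // no thumbnail block recorded for this id
            }
            byte[] buf = new byte[MiniThumbFile.BYTES_PER_MINTHUMB];
            return file.getMiniThumbFromFile(imageId, buf);  // JPEG bytes at the start of buf
        }
    }
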
+ */ + public synchronized byte [] getMiniThumbFromFile(long id, byte [] data) { + RandomAccessFile r = miniThumbDataFile(); + if (r == null) return null; + + long pos = id * BYTES_PER_MINTHUMB; + FileLock lock = null; + try { + mBuffer.clear(); + lock = mChannel.lock(pos, BYTES_PER_MINTHUMB, true); + int size = mChannel.read(mBuffer, pos); + if (size > 1 + 8 + 4) { // flag, magic, length + mBuffer.position(0); + byte flag = mBuffer.get(); + long magic = mBuffer.getLong(); + int length = mBuffer.getInt(); + + if (size >= 1 + 8 + 4 + length && data.length >= length) { + mBuffer.get(data, 0, length); + return data; + } + } + } catch (IOException ex) { + Log.w(TAG, "got exception when reading thumbnail id=" + id + ", exception: " + ex); + } catch (RuntimeException ex) { + // Other NIO related exception like disk full, read only channel..etc + Log.e(TAG, "Got exception when reading thumbnail, id = " + id + + ", disk full or mount read-only? " + ex.getClass()); + } finally { + try { + if (lock != null) lock.release(); + } + catch (IOException ex) { + // ignore it. + } + } + return null; + } +} diff --git a/media/java/android/media/RingtoneManager.java b/media/java/android/media/RingtoneManager.java index 42edae6..8481410 100644 --- a/media/java/android/media/RingtoneManager.java +++ b/media/java/android/media/RingtoneManager.java @@ -122,8 +122,9 @@ public class RingtoneManager { * current ringtone, which will be used to show a checkmark next to the item * for this {@link Uri}. If showing an item for "Default" (@see * {@link #EXTRA_RINGTONE_SHOW_DEFAULT}), this can also be one of - * {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI} to have the "Default" item + * {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" item * checked. * * @see #ACTION_RINGTONE_PICKER @@ -134,8 +135,9 @@ public class RingtoneManager { /** * Given to the ringtone picker as a {@link Uri}. The {@link Uri} of the * ringtone to play when the user attempts to preview the "Default" - * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI} to have the "Default" point to + * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" point to * the current sound for the given default sound type. If you are showing a * ringtone picker for some other type of sound, you are free to provide any * {@link Uri} here. @@ -163,8 +165,9 @@ public class RingtoneManager { * <p> * It will be one of: * <li> the picked ringtone, - * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI} if the default was chosen, + * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI} if the default was chosen, * <li> null if the "Silent" item was picked. * * @see #ACTION_RINGTONE_PICKER @@ -602,21 +605,6 @@ public class RingtoneManager { Log.e(TAG, "Failed to open ringtone " + ringtoneUri); } - // Ringtone doesn't exist, use the fallback ringtone. 
- try { - AssetFileDescriptor afd = context.getResources().openRawResourceFd( - com.android.internal.R.raw.fallbackring); - if (afd != null) { - Ringtone r = new Ringtone(context); - r.open(afd); - afd.close(); - return r; - } - } catch (Exception ex) { - } - - // we should never get here - Log.e(TAG, "unable to find a usable ringtone"); return null; } @@ -627,15 +615,16 @@ public class RingtoneManager { * * @param context A context used for querying. * @param type The type whose default sound should be returned. One of - * {@link #TYPE_RINGTONE} or {@link #TYPE_NOTIFICATION}. + * {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or + * {@link #TYPE_ALARM}. * @return A {@link Uri} pointing to the default sound for the sound type. * @see #setActualDefaultRingtoneUri(Context, int, Uri) */ public static Uri getActualDefaultRingtoneUri(Context context, int type) { String setting = getSettingForType(type); if (setting == null) return null; - final String uriString = Settings.System.getString(context.getContentResolver(), setting); - return uriString != null ? Uri.parse(uriString) : getValidRingtoneUri(context); + final String uriString = Settings.System.getString(context.getContentResolver(), setting); + return uriString != null ? Uri.parse(uriString) : null; } /** @@ -643,14 +632,16 @@ public class RingtoneManager { * * @param context A context used for querying. * @param type The type whose default sound should be set. One of - * {@link #TYPE_RINGTONE} or {@link #TYPE_NOTIFICATION}. + * {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or + * {@link #TYPE_ALARM}. * @param ringtoneUri A {@link Uri} pointing to the default sound to set. * @see #getActualDefaultRingtoneUri(Context, int) */ public static void setActualDefaultRingtoneUri(Context context, int type, Uri ringtoneUri) { String setting = getSettingForType(type); if (setting == null) return; - Settings.System.putString(context.getContentResolver(), setting, ringtoneUri.toString()); + Settings.System.putString(context.getContentResolver(), setting, + ringtoneUri != null ? ringtoneUri.toString() : null); } private static String getSettingForType(int type) { @@ -658,6 +649,8 @@ public class RingtoneManager { return Settings.System.RINGTONE; } else if ((type & TYPE_NOTIFICATION) != 0) { return Settings.System.NOTIFICATION_SOUND; + } else if ((type & TYPE_ALARM) != 0) { + return Settings.System.ALARM_ALERT; } else { return null; } @@ -677,8 +670,9 @@ public class RingtoneManager { * Returns the type of a default {@link Uri}. * * @param defaultRingtoneUri The default {@link Uri}. For example, - * {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI}. + * {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI}. * @return The type of the defaultRingtoneUri, or -1. 
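With TYPE_ALARM wired into getSettingForType() above, callers can read and write the default alarm sound the same way they already do for ringtones and notifications. A small sketch using the public RingtoneManager API:

    import android.content.Context;
    import android.media.RingtoneManager;
    import android.net.Uri;

    // Sketch: the alarm stream now participates in the default-sound plumbing.
    class AlarmDefaultSketch {
        static Uri current(Context context) {
            return RingtoneManager.getActualDefaultRingtoneUri(context, RingtoneManager.TYPE_ALARM);
        }

        static void set(Context context, Uri uri) {
            // A null uri clears the setting, which setActualDefaultRingtoneUri() now tolerates.
            RingtoneManager.setActualDefaultRingtoneUri(context, RingtoneManager.TYPE_ALARM, uri);
        }
    }
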
*/ public static int getDefaultType(Uri defaultRingtoneUri) { @@ -688,6 +682,8 @@ public class RingtoneManager { return TYPE_RINGTONE; } else if (defaultRingtoneUri.equals(Settings.System.DEFAULT_NOTIFICATION_URI)) { return TYPE_NOTIFICATION; + } else if (defaultRingtoneUri.equals(Settings.System.DEFAULT_ALARM_ALERT_URI)) { + return TYPE_ALARM; } else { return -1; } @@ -707,6 +703,8 @@ public class RingtoneManager { return Settings.System.DEFAULT_RINGTONE_URI; } else if ((type & TYPE_NOTIFICATION) != 0) { return Settings.System.DEFAULT_NOTIFICATION_URI; + } else if ((type & TYPE_ALARM) != 0) { + return Settings.System.DEFAULT_ALARM_ALERT_URI; } else { return null; } diff --git a/media/java/android/media/ThumbnailUtil.java b/media/java/android/media/ThumbnailUtil.java new file mode 100644 index 0000000..f9d69fb --- /dev/null +++ b/media/java/android/media/ThumbnailUtil.java @@ -0,0 +1,544 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.media; + +import android.net.Uri; +import android.os.ParcelFileDescriptor; +import android.provider.BaseColumns; +import android.provider.MediaStore.Images; +import android.provider.MediaStore.Images.Thumbnails; +import android.util.Log; + +import android.content.ContentResolver; +import android.content.ContentUris; +import android.content.ContentValues; +import android.database.Cursor; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.graphics.Rect; +import android.media.MediaMetadataRetriever; + +import java.io.ByteArrayOutputStream; +import java.io.FileDescriptor; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStream; + +/** + * Thumbnail generation routines for media provider. This class should only be used internaly. + * {@hide} THIS IS NOT FOR PUBLIC API. + */ + +public class ThumbnailUtil { + private static final String TAG = "ThumbnailUtil"; + //Whether we should recycle the input (unless the output is the input). + public static final boolean RECYCLE_INPUT = true; + public static final boolean NO_RECYCLE_INPUT = false; + public static final boolean ROTATE_AS_NEEDED = true; + public static final boolean NO_ROTATE = false; + public static final boolean USE_NATIVE = true; + public static final boolean NO_NATIVE = false; + + public static final int THUMBNAIL_TARGET_SIZE = 320; + public static final int MINI_THUMB_TARGET_SIZE = 96; + public static final int THUMBNAIL_MAX_NUM_PIXELS = 512 * 384; + public static final int MINI_THUMB_MAX_NUM_PIXELS = 128 * 128; + public static final int UNCONSTRAINED = -1; + + // Returns Options that set the native alloc flag for Bitmap decode. + public static BitmapFactory.Options createNativeAllocOptions() { + BitmapFactory.Options options = new BitmapFactory.Options(); + options.inNativeAlloc = true; + return options; + } + /** + * Make a bitmap from a given Uri. 
+ * + * @param uri + */ + public static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels, + Uri uri, ContentResolver cr) { + return makeBitmap(minSideLength, maxNumOfPixels, uri, cr, + NO_NATIVE); + } + + /* + * Compute the sample size as a function of minSideLength + * and maxNumOfPixels. + * minSideLength is used to specify that minimal width or height of a + * bitmap. + * maxNumOfPixels is used to specify the maximal size in pixels that is + * tolerable in terms of memory usage. + * + * The function returns a sample size based on the constraints. + * Both size and minSideLength can be passed in as IImage.UNCONSTRAINED, + * which indicates no care of the corresponding constraint. + * The functions prefers returning a sample size that + * generates a smaller bitmap, unless minSideLength = IImage.UNCONSTRAINED. + * + * Also, the function rounds up the sample size to a power of 2 or multiple + * of 8 because BitmapFactory only honors sample size this way. + * For example, BitmapFactory downsamples an image by 2 even though the + * request is 3. So we round up the sample size to avoid OOM. + */ + public static int computeSampleSize(BitmapFactory.Options options, + int minSideLength, int maxNumOfPixels) { + int initialSize = computeInitialSampleSize(options, minSideLength, + maxNumOfPixels); + + int roundedSize; + if (initialSize <= 8 ) { + roundedSize = 1; + while (roundedSize < initialSize) { + roundedSize <<= 1; + } + } else { + roundedSize = (initialSize + 7) / 8 * 8; + } + + return roundedSize; + } + + private static int computeInitialSampleSize(BitmapFactory.Options options, + int minSideLength, int maxNumOfPixels) { + double w = options.outWidth; + double h = options.outHeight; + + int lowerBound = (maxNumOfPixels == UNCONSTRAINED) ? 1 : + (int) Math.ceil(Math.sqrt(w * h / maxNumOfPixels)); + int upperBound = (minSideLength == UNCONSTRAINED) ? 128 : + (int) Math.min(Math.floor(w / minSideLength), + Math.floor(h / minSideLength)); + + if (upperBound < lowerBound) { + // return the larger one when there is no overlapping zone. + return lowerBound; + } + + if ((maxNumOfPixels == UNCONSTRAINED) && + (minSideLength == UNCONSTRAINED)) { + return 1; + } else if (minSideLength == UNCONSTRAINED) { + return lowerBound; + } else { + return upperBound; + } + } + + public static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels, + Uri uri, ContentResolver cr, boolean useNative) { + ParcelFileDescriptor input = null; + try { + input = cr.openFileDescriptor(uri, "r"); + BitmapFactory.Options options = null; + if (useNative) { + options = createNativeAllocOptions(); + } + return makeBitmap(minSideLength, maxNumOfPixels, uri, cr, input, + options); + } catch (IOException ex) { + Log.e(TAG, "", ex); + return null; + } finally { + closeSilently(input); + } + } + + // Rotates the bitmap by the specified degree. + // If a new bitmap is created, the original bitmap is recycled. + public static Bitmap rotate(Bitmap b, int degrees) { + if (degrees != 0 && b != null) { + Matrix m = new Matrix(); + m.setRotate(degrees, + (float) b.getWidth() / 2, (float) b.getHeight() / 2); + try { + Bitmap b2 = Bitmap.createBitmap( + b, 0, 0, b.getWidth(), b.getHeight(), m, true); + if (b != b2) { + b.recycle(); + b = b2; + } + } catch (OutOfMemoryError ex) { + // We have no memory to rotate. Return the original bitmap. 
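computeSampleSize() above is designed to feed a two-pass BitmapFactory decode: read the bounds first, then decode for real with the computed inSampleSize. A minimal sketch of that pattern (ThumbnailUtil itself is @hide in this snapshot):

    import android.graphics.Bitmap;
    import android.graphics.BitmapFactory;
    import android.media.ThumbnailUtil;

    // Sketch of the bounds-then-decode pattern the sample-size helpers are written for.
    class SampleSizeSketch {
        static Bitmap decodeScaled(String path, int minSideLength, int maxNumOfPixels) {
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inJustDecodeBounds = true;     // first pass: dimensions only
            BitmapFactory.decodeFile(path, options);
            if (options.outWidth <= 0 || options.outHeight <= 0) {
                return null;                       // not a decodable image
            }
            options.inSampleSize =
                    ThumbnailUtil.computeSampleSize(options, minSideLength, maxNumOfPixels);
            options.inJustDecodeBounds = false;    // second pass: real, downsampled decode
            return BitmapFactory.decodeFile(path, options);
        }
    }
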
+ } + } + return b; + } + + private static void closeSilently(ParcelFileDescriptor c) { + if (c == null) return; + try { + c.close(); + } catch (Throwable t) { + // do nothing + } + } + + private static ParcelFileDescriptor makeInputStream( + Uri uri, ContentResolver cr) { + try { + return cr.openFileDescriptor(uri, "r"); + } catch (IOException ex) { + return null; + } + } + + public static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels, + Uri uri, ContentResolver cr, ParcelFileDescriptor pfd, + BitmapFactory.Options options) { + Bitmap b = null; + try { + if (pfd == null) pfd = makeInputStream(uri, cr); + if (pfd == null) return null; + if (options == null) options = new BitmapFactory.Options(); + + FileDescriptor fd = pfd.getFileDescriptor(); + options.inSampleSize = 1; + options.inJustDecodeBounds = true; + BitmapFactory.decodeFileDescriptor(fd, null, options); + if (options.mCancel || options.outWidth == -1 + || options.outHeight == -1) { + return null; + } + options.inSampleSize = computeSampleSize( + options, minSideLength, maxNumOfPixels); + options.inJustDecodeBounds = false; + + options.inDither = false; + options.inPreferredConfig = Bitmap.Config.ARGB_8888; + b = BitmapFactory.decodeFileDescriptor(fd, null, options); + } catch (OutOfMemoryError ex) { + Log.e(TAG, "Got oom exception ", ex); + return null; + } finally { + closeSilently(pfd); + } + return b; + } + + /** + * Creates a centered bitmap of the desired size. + * @param source + * @param recycle whether we want to recycle the input + */ + public static Bitmap extractMiniThumb( + Bitmap source, int width, int height, boolean recycle) { + if (source == null) { + return null; + } + + float scale; + if (source.getWidth() < source.getHeight()) { + scale = width / (float) source.getWidth(); + } else { + scale = height / (float) source.getHeight(); + } + Matrix matrix = new Matrix(); + matrix.setScale(scale, scale); + Bitmap miniThumbnail = transform(matrix, source, width, height, true, recycle); + return miniThumbnail; + } + + /** + * Create a video thumbnail for a video. May return null if the video is + * corrupt. + * + * @param filePath + */ + public static Bitmap createVideoThumbnail(String filePath) { + Bitmap bitmap = null; + MediaMetadataRetriever retriever = new MediaMetadataRetriever(); + try { + retriever.setMode(MediaMetadataRetriever.MODE_CAPTURE_FRAME_ONLY); + retriever.setDataSource(filePath); + bitmap = retriever.captureFrame(); + } catch (IllegalArgumentException ex) { + // Assume this is a corrupt video file + } catch (RuntimeException ex) { + // Assume this is a corrupt video file. + } finally { + try { + retriever.release(); + } catch (RuntimeException ex) { + // Ignore failures while cleaning up. + } + } + return bitmap; + } + + /** + * This method first examines if the thumbnail embedded in EXIF is bigger than our target + * size. If not, then it'll create a thumbnail from original image. Due to efficiency + * consideration, we want to let MediaThumbRequest avoid calling this method twice for + * both kinds, so it only requests for MICRO_KIND and set saveImage to true. + * + * This method always returns a "square thumbnail" for MICRO_KIND thumbnail. + * + * @param cr ContentResolver + * @param filePath file path needed by EXIF interface + * @param uri URI of original image + * @param origId image id + * @param kind either MINI_KIND or MICRO_KIND + * @param saveImage Whether to save MINI_KIND thumbnail obtained in this method. 
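createVideoThumbnail() above grabs a frame through MediaMetadataRetriever, and extractMiniThumb() squares it off the way the MICRO_KIND path of createImageThumbnail() (just below) does for images. A usage sketch, with the understanding that these helpers are @hide and the video path is illustrative:

    import android.graphics.Bitmap;
    import android.media.ThumbnailUtil;

    // Sketch: frame capture plus square mini-thumb, as the provider does for MICRO_KIND.
    class VideoThumbSketch {
        static Bitmap microThumb(String videoPath) {
            Bitmap frame = ThumbnailUtil.createVideoThumbnail(videoPath);
            if (frame == null) {
                return null;                       // corrupt or unreadable video
            }
            return ThumbnailUtil.extractMiniThumb(frame,
                    ThumbnailUtil.MINI_THUMB_TARGET_SIZE,
                    ThumbnailUtil.MINI_THUMB_TARGET_SIZE,
                    ThumbnailUtil.RECYCLE_INPUT);
        }
    }
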
+ * @return Bitmap + */ + public static Bitmap createImageThumbnail(ContentResolver cr, String filePath, Uri uri, + long origId, int kind, boolean saveMini) { + boolean wantMini = (kind == Images.Thumbnails.MINI_KIND || saveMini); + int targetSize = wantMini ? + ThumbnailUtil.THUMBNAIL_TARGET_SIZE : ThumbnailUtil.MINI_THUMB_TARGET_SIZE; + int maxPixels = wantMini ? + ThumbnailUtil.THUMBNAIL_MAX_NUM_PIXELS : ThumbnailUtil.MINI_THUMB_MAX_NUM_PIXELS; + byte[] thumbData = createThumbnailFromEXIF(filePath, targetSize); + Bitmap bitmap = null; + + if (thumbData != null) { + BitmapFactory.Options options = new BitmapFactory.Options(); + options.inSampleSize = computeSampleSize(options, targetSize, maxPixels); + options.inDither = false; + options.inPreferredConfig = Bitmap.Config.ARGB_8888; + options.inJustDecodeBounds = false; + bitmap = BitmapFactory.decodeByteArray(thumbData, 0, thumbData.length, options); + } + + if (bitmap == null) { + bitmap = ThumbnailUtil.makeBitmap(targetSize, maxPixels, uri, cr); + } + + if (bitmap == null) { + return null; + } + + if (saveMini) { + if (thumbData != null) { + ThumbnailUtil.storeThumbnail(cr, origId, thumbData, bitmap.getWidth(), + bitmap.getHeight()); + } else { + ThumbnailUtil.storeThumbnail(cr, origId, bitmap); + } + } + + if (kind == Images.Thumbnails.MICRO_KIND) { + // now we make it a "square thumbnail" for MICRO_KIND thumbnail + bitmap = ThumbnailUtil.extractMiniThumb(bitmap, + ThumbnailUtil.MINI_THUMB_TARGET_SIZE, + ThumbnailUtil.MINI_THUMB_TARGET_SIZE, ThumbnailUtil.RECYCLE_INPUT); + } + return bitmap; + } + + public static Bitmap transform(Matrix scaler, + Bitmap source, + int targetWidth, + int targetHeight, + boolean scaleUp, + boolean recycle) { + + int deltaX = source.getWidth() - targetWidth; + int deltaY = source.getHeight() - targetHeight; + if (!scaleUp && (deltaX < 0 || deltaY < 0)) { + /* + * In this case the bitmap is smaller, at least in one dimension, + * than the target. Transform it by placing as much of the image + * as possible into the target and leaving the top/bottom or + * left/right (or both) black. + */ + Bitmap b2 = Bitmap.createBitmap(targetWidth, targetHeight, + Bitmap.Config.ARGB_8888); + Canvas c = new Canvas(b2); + + int deltaXHalf = Math.max(0, deltaX / 2); + int deltaYHalf = Math.max(0, deltaY / 2); + Rect src = new Rect( + deltaXHalf, + deltaYHalf, + deltaXHalf + Math.min(targetWidth, source.getWidth()), + deltaYHalf + Math.min(targetHeight, source.getHeight())); + int dstX = (targetWidth - src.width()) / 2; + int dstY = (targetHeight - src.height()) / 2; + Rect dst = new Rect( + dstX, + dstY, + targetWidth - dstX, + targetHeight - dstY); + c.drawBitmap(source, src, dst, null); + if (recycle) { + source.recycle(); + } + return b2; + } + float bitmapWidthF = source.getWidth(); + float bitmapHeightF = source.getHeight(); + + float bitmapAspect = bitmapWidthF / bitmapHeightF; + float viewAspect = (float) targetWidth / targetHeight; + + if (bitmapAspect > viewAspect) { + float scale = targetHeight / bitmapHeightF; + if (scale < .9F || scale > 1F) { + scaler.setScale(scale, scale); + } else { + scaler = null; + } + } else { + float scale = targetWidth / bitmapWidthF; + if (scale < .9F || scale > 1F) { + scaler.setScale(scale, scale); + } else { + scaler = null; + } + } + + Bitmap b1; + if (scaler != null) { + // this is used for minithumb and crop, so we want to filter here. 
+ b1 = Bitmap.createBitmap(source, 0, 0, + source.getWidth(), source.getHeight(), scaler, true); + } else { + b1 = source; + } + + if (recycle && b1 != source) { + source.recycle(); + } + + int dx1 = Math.max(0, b1.getWidth() - targetWidth); + int dy1 = Math.max(0, b1.getHeight() - targetHeight); + + Bitmap b2 = Bitmap.createBitmap( + b1, + dx1 / 2, + dy1 / 2, + targetWidth, + targetHeight); + + if (b2 != b1) { + if (recycle || b1 != source) { + b1.recycle(); + } + } + + return b2; + } + + private static final String[] THUMB_PROJECTION = new String[] { + BaseColumns._ID // 0 + }; + + /** + * Look up thumbnail uri by given imageId, it will be automatically created if it's not created + * yet. Most of the time imageId is identical to thumbId, but it's not always true. + * @param req + * @param width + * @param height + * @return Uri Thumbnail uri + */ + private static Uri getImageThumbnailUri(ContentResolver cr, long origId, int width, int height) { + Uri thumbUri = Images.Thumbnails.EXTERNAL_CONTENT_URI; + Cursor c = cr.query(thumbUri, THUMB_PROJECTION, + Thumbnails.IMAGE_ID + "=?", + new String[]{String.valueOf(origId)}, null); + try { + if (c.moveToNext()) { + return ContentUris.withAppendedId(thumbUri, c.getLong(0)); + } + } finally { + if (c != null) c.close(); + } + + ContentValues values = new ContentValues(4); + values.put(Thumbnails.KIND, Thumbnails.MINI_KIND); + values.put(Thumbnails.IMAGE_ID, origId); + values.put(Thumbnails.HEIGHT, height); + values.put(Thumbnails.WIDTH, width); + try { + return cr.insert(thumbUri, values); + } catch (Exception ex) { + Log.w(TAG, ex); + return null; + } + } + + /** + * Store a given thumbnail in the database. (Bitmap) + */ + private static boolean storeThumbnail(ContentResolver cr, long origId, Bitmap thumb) { + if (thumb == null) return false; + try { + Uri uri = getImageThumbnailUri(cr, origId, thumb.getWidth(), thumb.getHeight()); + OutputStream thumbOut = cr.openOutputStream(uri); + thumb.compress(Bitmap.CompressFormat.JPEG, 85, thumbOut); + thumbOut.close(); + return true; + } catch (Throwable t) { + Log.e(TAG, "Unable to store thumbnail", t); + return false; + } + } + + /** + * Store a given thumbnail in the database. (byte array) + */ + private static boolean storeThumbnail(ContentResolver cr, long origId, byte[] jpegThumbnail, + int width, int height) { + if (jpegThumbnail == null) return false; + + Uri uri = getImageThumbnailUri(cr, origId, width, height); + if (uri == null) { + return false; + } + try { + OutputStream thumbOut = cr.openOutputStream(uri); + thumbOut.write(jpegThumbnail); + thumbOut.close(); + return true; + } catch (Throwable t) { + Log.e(TAG, "Unable to store thumbnail", t); + return false; + } + } + + // Extract thumbnail in image that meets the targetSize criteria. + static byte[] createThumbnailFromEXIF(String filePath, int targetSize) { + if (filePath == null) return null; + + try { + ExifInterface exif = new ExifInterface(filePath); + if (exif == null) return null; + byte [] thumbData = exif.getThumbnail(); + if (thumbData == null) return null; + // Sniff the size of the EXIF thumbnail before decoding it. Photos + // from the device will pass, but images that are side loaded from + // other cameras may not. 
+ BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inJustDecodeBounds = true;
+ BitmapFactory.decodeByteArray(thumbData, 0, thumbData.length, options);
+
+ int width = options.outWidth;
+ int height = options.outHeight;
+
+ if (width >= targetSize && height >= targetSize) {
+ return thumbData;
+ }
+ } catch (IOException ex) {
+ Log.w(TAG, ex);
+ }
+ return null;
+ }
+}
diff --git a/media/java/android/media/ToneGenerator.java b/media/java/android/media/ToneGenerator.java
index e5ee9a3..d232265 100644
--- a/media/java/android/media/ToneGenerator.java
+++ b/media/java/android/media/ToneGenerator.java
@@ -724,9 +724,9 @@ public class ToneGenerator
 public static final int TONE_CDMA_SIGNAL_OFF = 98;
 
 /** Maximum volume, for use with {@link #ToneGenerator(int,int)} */
- public static final int MAX_VOLUME = AudioSystem.MAX_VOLUME;
+ public static final int MAX_VOLUME = 100;
 /** Minimum volume setting, for use with {@link #ToneGenerator(int,int)} */
- public static final int MIN_VOLUME = AudioSystem.MIN_VOLUME;
+ public static final int MIN_VOLUME = 0;
 
 /**
@@ -744,7 +744,7 @@
 * This method starts the playback of a tone of the specified type.
 * Only one tone can play at a time: if a tone is playing while this method is called,
 * this tone is stopped and replaced by the one requested.
- * @param toneType The type of tone generate chosen from the following list:
+ * @param toneType The type of tone generated, chosen from the following list:
 * <ul>
 * <li>{@link #TONE_DTMF_0}
 * <li>{@link #TONE_DTMF_1}
@@ -846,7 +846,18 @@
 * </ul>
 * @see #ToneGenerator(int, int)
 */
- public native boolean startTone(int toneType);
+ public boolean startTone(int toneType) {
+ return startTone(toneType, -1);
+ }
+
+ /**
+ * This method starts the playback of a tone of the specified type for the specified duration.
+ * @param toneType The type of tone to generate (see {@link #startTone(int)} for the available tones).
+ * @param durationMs The tone duration in milliseconds. If the tone is limited in time by definition,
+ * the actual duration will be the minimum of durationMs and the defined tone duration. Setting durationMs to -1
+ * is equivalent to calling {@link #startTone(int)}.
+ */
+ public native boolean startTone(int toneType, int durationMs);
 
 /**
 * This method stops the tone currently playing.
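The last hunk above turns the native one-argument startTone() into a Java wrapper that delegates to a new native startTone(int toneType, int durationMs). A minimal usage sketch, assuming the two-argument overload is callable from application code; the stream, tone, and 200 ms duration are illustrative choices, not taken from the patch:

import android.media.AudioManager;
import android.media.ToneGenerator;

public final class ToneDemo {
    private ToneDemo() {}

    // Plays a single DTMF "5" for at most 200 ms using the new duration-limited
    // overload; note that MAX_VOLUME is now the literal 100 rather than
    // AudioSystem.MAX_VOLUME.
    public static ToneGenerator playShortDtmf() {
        ToneGenerator tg = new ToneGenerator(AudioManager.STREAM_DTMF,
                ToneGenerator.MAX_VOLUME);
        tg.startTone(ToneGenerator.TONE_DTMF_5, 200); // stops on its own after ~200 ms
        return tg; // caller should call release() once no more tones are needed
    }
}

Passing -1 as durationMs restores the old unbounded behaviour, which is exactly what the rewritten one-argument startTone(int) now does.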
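ThumbnailUtil.makeBitmap() in the hunk further up uses a two-pass BitmapFactory decode: a bounds-only pass to learn the image dimensions, then a real decode with inSampleSize chosen by computeSampleSize(). A minimal sketch of the same pattern using only public BitmapFactory calls; the class name and the power-of-two rounding below are illustrative and not the patch's exact computeSampleSize() logic:

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

public final class ScaledDecodeSketch {
    private ScaledDecodeSketch() {}

    // Decodes a file so the shorter side stays at or above minSideLength pixels,
    // mirroring the bounds-then-decode pattern used by ThumbnailUtil.makeBitmap().
    public static Bitmap decodeScaled(String path, int minSideLength) {
        // Pass 1: read only the dimensions, no pixel allocation.
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(path, options);
        if (options.outWidth <= 0 || options.outHeight <= 0) {
            return null; // not a decodable image
        }

        // Largest power-of-two subsample that keeps the shorter side >= minSideLength.
        int minSide = Math.min(options.outWidth, options.outHeight);
        int sampleSize = 1;
        while (minSide / (sampleSize * 2) >= minSideLength) {
            sampleSize *= 2;
        }

        // Pass 2: decode for real at the reduced resolution.
        options.inJustDecodeBounds = false;
        options.inSampleSize = sampleSize;
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
        try {
            return BitmapFactory.decodeFile(path, options);
        } catch (OutOfMemoryError e) {
            return null; // same defensive handling the patch applies
        }
    }
}

decodeFile() is used here only to keep the sketch self-contained; the patch itself decodes from a ParcelFileDescriptor obtained through the ContentResolver.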