summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
Diffstat (limited to 'media')
-rw-r--r--media/java/android/media/AudioFormat.java55
-rw-r--r--media/java/android/media/AudioManager.java203
-rw-r--r--media/java/android/media/AudioRecord.java36
-rw-r--r--media/java/android/media/AudioService.java701
-rw-r--r--media/java/android/media/AudioSystem.java157
-rw-r--r--media/java/android/media/AudioTrack.java35
-rw-r--r--media/java/android/media/IAudioService.aidl16
-rw-r--r--media/java/android/media/JetPlayer.java2
-rw-r--r--media/java/android/media/MediaFile.java8
-rw-r--r--media/java/android/media/MediaPlayer.java203
-rw-r--r--media/java/android/media/MediaScanner.java12
-rw-r--r--media/java/android/media/Metadata.java418
-rw-r--r--media/java/android/media/RingtoneManager.java32
-rw-r--r--media/java/android/media/ToneGenerator.java4
-rw-r--r--media/jni/Android.mk4
-rw-r--r--media/jni/android_media_MediaPlayer.cpp83
-rw-r--r--media/jni/soundpool/Android.mk1
-rw-r--r--media/jni/soundpool/SoundPool.cpp5
-rw-r--r--media/libdrm/mobile2/include/rights/RoManager.h6
-rw-r--r--media/libdrm/mobile2/src/rights/RoManager.cpp6
-rw-r--r--media/libmedia/Android.mk44
-rw-r--r--media/libmedia/AudioRecord.cpp98
-rw-r--r--media/libmedia/AudioSystem.cpp744
-rw-r--r--media/libmedia/AudioTrack.cpp197
-rw-r--r--media/libmedia/IAudioFlinger.cpp390
-rw-r--r--media/libmedia/IAudioFlingerClient.cpp53
-rw-r--r--media/libmedia/IAudioPolicyService.cpp423
-rw-r--r--media/libmedia/IAudioRecord.cpp8
-rw-r--r--media/libmedia/IAudioTrack.cpp8
-rw-r--r--media/libmedia/IMediaMetadataRetriever.cpp11
-rw-r--r--media/libmedia/IMediaPlayer.cpp60
-rw-r--r--media/libmedia/IMediaPlayerClient.cpp13
-rw-r--r--media/libmedia/IMediaPlayerService.cpp29
-rw-r--r--media/libmedia/IMediaRecorder.cpp10
-rw-r--r--media/libmedia/IOMX.cpp561
-rw-r--r--media/libmedia/JetPlayer.cpp2
-rw-r--r--media/libmedia/Metadata.cpp168
-rw-r--r--media/libmedia/ToneGenerator.cpp2
-rw-r--r--media/libmedia/mediametadataretriever.cpp4
-rw-r--r--media/libmedia/mediaplayer.cpp43
-rw-r--r--media/libmedia/mediarecorder.cpp2
-rw-r--r--media/libmediaplayerservice/Android.mk21
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.cpp349
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.h59
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.cpp8
-rw-r--r--media/libmediaplayerservice/MetadataRetrieverClient.cpp10
-rw-r--r--media/libmediaplayerservice/MetadataRetrieverClient.h7
-rw-r--r--media/libmediaplayerservice/MidiFile.h4
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.cpp208
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.h60
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.cpp196
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.h119
-rw-r--r--media/libmediaplayerservice/VorbisPlayer.h2
-rw-r--r--media/libstagefright/Android.mk60
-rw-r--r--media/libstagefright/AudioPlayer.cpp285
-rw-r--r--media/libstagefright/CachingDataSource.cpp162
-rw-r--r--media/libstagefright/CameraSource.cpp226
-rw-r--r--media/libstagefright/DataSource.cpp76
-rw-r--r--media/libstagefright/ESDS.cpp196
-rw-r--r--media/libstagefright/FileSource.cpp50
-rw-r--r--media/libstagefright/HTTPDataSource.cpp174
-rw-r--r--media/libstagefright/HTTPStream.cpp285
-rw-r--r--media/libstagefright/MP3Extractor.cpp527
-rw-r--r--media/libstagefright/MPEG4Extractor.cpp937
-rw-r--r--media/libstagefright/MPEG4Writer.cpp641
-rw-r--r--media/libstagefright/MediaBuffer.cpp174
-rw-r--r--media/libstagefright/MediaBufferGroup.cpp88
-rw-r--r--media/libstagefright/MediaExtractor.cpp54
-rw-r--r--media/libstagefright/MediaPlayerImpl.cpp693
-rw-r--r--media/libstagefright/MediaSource.cpp60
-rw-r--r--media/libstagefright/MetaData.cpp232
-rw-r--r--media/libstagefright/MmapSource.cpp112
-rw-r--r--media/libstagefright/OMXClient.cpp369
-rw-r--r--media/libstagefright/OMXDecoder.cpp1329
-rw-r--r--media/libstagefright/QComHardwareRenderer.cpp139
-rw-r--r--media/libstagefright/SampleTable.cpp598
-rw-r--r--media/libstagefright/ShoutcastSource.cpp154
-rw-r--r--media/libstagefright/SoftwareRenderer.cpp173
-rw-r--r--media/libstagefright/SurfaceRenderer.cpp69
-rw-r--r--media/libstagefright/TimeSource.cpp41
-rw-r--r--media/libstagefright/TimedEventQueue.cpp207
-rw-r--r--media/libstagefright/Utils.cpp45
-rw-r--r--media/libstagefright/omx/Android.mk27
-rw-r--r--media/libstagefright/omx/OMX.cpp623
-rw-r--r--media/libstagefright/omx/OMX.h132
-rw-r--r--media/libstagefright/string.cpp83
-rw-r--r--media/mediaserver/Android.mk3
-rw-r--r--media/mediaserver/main_mediaserver.cpp8
-rw-r--r--media/sdutils/sdutil.cpp4
-rwxr-xr-xmedia/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java6
-rwxr-xr-xmedia/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java13
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java96
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java70
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java10
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java432
-rw-r--r--media/tests/players/Android.mk29
-rw-r--r--media/tests/players/README8
-rw-r--r--media/tests/players/invoke_mock_media_player.cpp121
98 files changed, 14540 insertions, 1181 deletions
diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java
index 0732b61..500f6a4 100644
--- a/media/java/android/media/AudioFormat.java
+++ b/media/java/android/media/AudioFormat.java
@@ -37,15 +37,58 @@ public class AudioFormat {
public static final int ENCODING_PCM_8BIT = 3; // accessed by native code
/** Invalid audio channel configuration */
- public static final int CHANNEL_CONFIGURATION_INVALID = 0;
+ /** @deprecated use CHANNEL_INVALID instead */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0;
/** Default audio channel configuration */
- public static final int CHANNEL_CONFIGURATION_DEFAULT = 1;
+ /** @deprecated use CHANNEL_OUT_DEFAULT or CHANNEL_IN_DEFAULT instead */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1;
/** Mono audio configuration */
- public static final int CHANNEL_CONFIGURATION_MONO = 2;
+ /** @deprecated use CHANNEL_OUT_MONO or CHANNEL_IN_MONO instead */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2;
/** Stereo (2 channel) audio configuration */
- public static final int CHANNEL_CONFIGURATION_STEREO = 3;
-
-}
+ /** @deprecated use CHANNEL_OUT_STEREO or CHANNEL_IN_STEREO instead */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3;
+ /** Invalid audio channel mask */
+ public static final int CHANNEL_INVALID = -1;
+ /** Default audio channel mask */
+ // Channel mask definitions must be kept in sync with native values in include/media/AudioSystem.h
+ public static final int CHANNEL_OUT_DEFAULT = 0;
+ public static final int CHANNEL_OUT_FRONT_LEFT = 0x1;
+ public static final int CHANNEL_OUT_FRONT_RIGHT = 0x2;
+ public static final int CHANNEL_OUT_FRONT_CENTER = 0x4;
+ public static final int CHANNEL_OUT_LOW_FREQUENCY = 0x8;
+ public static final int CHANNEL_OUT_BACK_LEFT = 0x10;
+ public static final int CHANNEL_OUT_BACK_RIGHT = 0x20;
+ public static final int CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x40;
+ public static final int CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80;
+ public static final int CHANNEL_OUT_BACK_CENTER = 0x100;
+ public static final int CHANNEL_OUT_MONO = CHANNEL_OUT_FRONT_LEFT;
+ public static final int CHANNEL_OUT_STEREO = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT);
+ public static final int CHANNEL_OUT_QUAD = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT);
+ public static final int CHANNEL_OUT_SURROUND = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_CENTER);
+ public static final int CHANNEL_OUT_5POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT);
+ public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT |
+ CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER);
+ public static final int CHANNEL_IN_DEFAULT = 0;
+ public static final int CHANNEL_IN_LEFT = 0x10000;
+ public static final int CHANNEL_IN_RIGHT = 0x20000;
+ public static final int CHANNEL_IN_FRONT = 0x40000;
+ public static final int CHANNEL_IN_BACK = 0x80000;
+ public static final int CHANNEL_IN_LEFT_PROCESSED = 0x100000;
+ public static final int CHANNEL_IN_RIGHT_PROCESSED = 0x200000;
+ public static final int CHANNEL_IN_FRONT_PROCESSED = 0x400000;
+ public static final int CHANNEL_IN_BACK_PROCESSED = 0x800000;
+ public static final int CHANNEL_IN_PRESSURE = 0x1000000;
+ public static final int CHANNEL_IN_X_AXIS = 0x2000000;
+ public static final int CHANNEL_IN_Y_AXIS = 0x4000000;
+ public static final int CHANNEL_IN_Z_AXIS = 0x8000000;
+ public static final int CHANNEL_IN_MONO = CHANNEL_IN_FRONT;
+ public static final int CHANNEL_IN_STEREO = (CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT);
+}
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index a65a417..040d4bc 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -140,11 +140,17 @@ public class AudioManager {
public static final int STREAM_NOTIFICATION = AudioSystem.STREAM_NOTIFICATION;
/** @hide The audio stream for phone calls when connected to bluetooth */
public static final int STREAM_BLUETOOTH_SCO = AudioSystem.STREAM_BLUETOOTH_SCO;
+    /** @hide The audio stream for enforced system sounds in certain countries (e.g. camera in Japan) */
+ public static final int STREAM_SYSTEM_ENFORCED = AudioSystem.STREAM_SYSTEM_ENFORCED;
+ /** The audio stream for DTMF Tones */
+ public static final int STREAM_DTMF = AudioSystem.STREAM_DTMF;
+ /** @hide The audio stream for text to speech (TTS) */
+ public static final int STREAM_TTS = AudioSystem.STREAM_TTS;
/** Number of audio streams */
/**
* @deprecated Use AudioSystem.getNumStreamTypes() instead
*/
- public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS;
+ @Deprecated public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS;
/** @hide Maximum volume index values for audio streams */
@@ -156,6 +162,9 @@ public class AudioManager {
8, // STREAM_ALARM
8, // STREAM_NOTIFICATION
16, // STREAM_BLUETOOTH_SCO
+ 8, // STREAM_SYSTEM_ENFORCED
+ 16, // STREAM_DTMF
+ 16 // STREAM_TTS
};
/** @hide Default volume index values for audio streams */
@@ -166,7 +175,10 @@ public class AudioManager {
11, // STREAM_MUSIC
6, // STREAM_ALARM
5, // STREAM_NOTIFICATION
- 7 // STREAM_BLUETOOTH_SCO
+ 7, // STREAM_BLUETOOTH_SCO
+ 5, // STREAM_SYSTEM_ENFORCED
+ 11, // STREAM_DTMF
+ 11 // STREAM_TTS
};
/**
@@ -637,9 +649,11 @@ public class AudioManager {
* <var>false</var> to turn it off
*/
public void setSpeakerphoneOn(boolean on){
- // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
- // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
- setRoutingP(MODE_INVALID, on ? ROUTE_SPEAKER: 0, ROUTE_SPEAKER);
+ if (on) {
+ AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_SPEAKER);
+ } else {
+ AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_NONE);
+ }
}
/**
@@ -648,41 +662,47 @@ public class AudioManager {
* @return true if speakerphone is on, false if it's off
*/
public boolean isSpeakerphoneOn() {
- return (getRoutingP(MODE_IN_CALL) & ROUTE_SPEAKER) == 0 ? false : true;
+ if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION) == AudioSystem.FORCE_SPEAKER) {
+ return true;
+ } else {
+ return false;
+ }
}
/**
- * Sets audio routing to the Bluetooth headset on or off.
+ * Request use of Bluetooth SCO headset for communications.
*
- * @param on set <var>true</var> to route SCO (voice) audio to/from Bluetooth
- * headset; <var>false</var> to route audio to/from phone earpiece
+ * @param on set <var>true</var> to use bluetooth SCO for communications;
+ * <var>false</var> to not use bluetooth SCO for communications
*/
public void setBluetoothScoOn(boolean on){
- // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
- // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
- setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_SCO: 0, ROUTE_BLUETOOTH_SCO);
+ if (on) {
+ AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_BT_SCO);
+ } else {
+ AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_NONE);
+ }
}
/**
- * Checks whether audio routing to the Bluetooth headset is on or off.
+ * Checks whether communications use Bluetooth SCO.
*
- * @return true if SCO audio is being routed to/from Bluetooth headset;
+ * @return true if SCO is used for communications;
* false if otherwise
*/
public boolean isBluetoothScoOn() {
- return (getRoutingP(MODE_IN_CALL) & ROUTE_BLUETOOTH_SCO) == 0 ? false : true;
+ if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION) == AudioSystem.FORCE_BT_SCO) {
+ return true;
+ } else {
+ return false;
+ }
}
/**
- * Sets A2DP audio routing to the Bluetooth headset on or off.
- *
* @param on set <var>true</var> to route A2DP audio to/from Bluetooth
* headset; <var>false</var> disable A2DP audio
+ * @deprecated Do not use.
*/
- public void setBluetoothA2dpOn(boolean on){
- // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
- // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
- setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_A2DP: 0, ROUTE_BLUETOOTH_A2DP);
+ @Deprecated public void setBluetoothA2dpOn(boolean on){
}
/**
@@ -692,7 +712,12 @@ public class AudioManager {
* false if otherwise
*/
public boolean isBluetoothA2dpOn() {
- return (getRoutingP(MODE_NORMAL) & ROUTE_BLUETOOTH_A2DP) == 0 ? false : true;
+ if (AudioSystem.getDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,"")
+ == AudioSystem.DEVICE_STATE_UNAVAILABLE) {
+ return false;
+ } else {
+ return true;
+ }
}
/**
@@ -700,12 +725,9 @@ public class AudioManager {
*
* @param on set <var>true</var> to route audio to/from wired
* headset; <var>false</var> disable wired headset audio
- * @hide
+ * @deprecated Do not use.
*/
- public void setWiredHeadsetOn(boolean on){
- // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
- // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager
- setRoutingP(MODE_INVALID, on ? ROUTE_HEADSET: 0, ROUTE_HEADSET);
+ @Deprecated public void setWiredHeadsetOn(boolean on){
}
/**
@@ -713,10 +735,14 @@ public class AudioManager {
*
* @return true if audio is being routed to/from wired headset;
* false if otherwise
- * @hide
*/
public boolean isWiredHeadsetOn() {
- return (getRoutingP(MODE_NORMAL) & ROUTE_HEADSET) == 0 ? false : true;
+ if (AudioSystem.getDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET,"")
+ == AudioSystem.DEVICE_STATE_UNAVAILABLE) {
+ return false;
+ } else {
+ return true;
+ }
}
/**
@@ -726,12 +752,7 @@ public class AudioManager {
* <var>false</var> to turn mute off
*/
public void setMicrophoneMute(boolean on){
- IAudioService service = getService();
- try {
- service.setMicrophoneMute(on);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setMicrophoneMute", e);
- }
+ AudioSystem.muteMicrophone(on);
}
/**
@@ -740,13 +761,7 @@ public class AudioManager {
* @return true if microphone is muted, false if it's not
*/
public boolean isMicrophoneMute() {
- IAudioService service = getService();
- try {
- return service.isMicrophoneMute();
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in isMicrophoneMute", e);
- return false;
- }
+ return AudioSystem.isMicrophoneMuted();
}
/**
@@ -809,32 +824,39 @@ public class AudioManager {
/* Routing bits for setRouting/getRouting API */
/**
* Routing audio output to earpiece
+ * @deprecated
*/
- public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE;
+ @Deprecated public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE;
/**
* Routing audio output to spaker
+ * @deprecated
*/
- public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER;
+ @Deprecated public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER;
/**
* @deprecated use {@link #ROUTE_BLUETOOTH_SCO}
+ * @deprecated
*/
@Deprecated public static final int ROUTE_BLUETOOTH = AudioSystem.ROUTE_BLUETOOTH_SCO;
/**
* Routing audio output to bluetooth SCO
+ * @deprecated
*/
- public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO;
+ @Deprecated public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO;
/**
* Routing audio output to headset
+ * @deprecated
*/
- public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET;
+ @Deprecated public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET;
/**
* Routing audio output to bluetooth A2DP
+ * @deprecated
*/
- public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP;
+ @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP;
/**
* Used for mask parameter of {@link #setRouting(int,int,int)}.
+ * @deprecated
*/
- public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL;
+ @Deprecated public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL;
/**
* Sets the audio routing for a specified mode
@@ -846,16 +868,10 @@ public class AudioManager {
* ROUTE_xxx types. Unset bits indicate the route should be left unchanged
*
* @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
- * setBluetoothScoOn(), setBluetoothA2dpOn() and setWiredHeadsetOn() methods instead.
+ * setBluetoothScoOn() methods instead.
*/
-
+ @Deprecated
public void setRouting(int mode, int routes, int mask) {
- IAudioService service = getService();
- try {
- service.setRouting(mode, routes, mask);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setRouting", e);
- }
}
/**
@@ -869,13 +885,7 @@ public class AudioManager {
*/
@Deprecated
public int getRouting(int mode) {
- IAudioService service = getService();
- try {
- return service.getRouting(mode);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in getRouting", e);
- return -1;
- }
+ return -1;
}
/**
@@ -884,13 +894,7 @@ public class AudioManager {
* @return true if any music tracks are active.
*/
public boolean isMusicActive() {
- IAudioService service = getService();
- try {
- return service.isMusicActive();
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in isMusicActive", e);
- return false;
- }
+ return AudioSystem.isMusicActive();
}
/*
@@ -906,14 +910,32 @@ public class AudioManager {
*/
/**
* @hide
+     * @deprecated Use {@link #setParameters(String)} instead
*/
- public void setParameter(String key, String value) {
- IAudioService service = getService();
- try {
- service.setParameter(key, value);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setParameter", e);
- }
+ @Deprecated public void setParameter(String key, String value) {
+ setParameters(key+"="+value);
+ }
+
+ /**
+ * Sets a variable number of parameter values to audio hardware.
+ *
+ * @param keyValuePairs list of parameters key value pairs in the form:
+ * key1=value1;key2=value2;...
+ *
+ */
+ public void setParameters(String keyValuePairs) {
+ AudioSystem.setParameters(keyValuePairs);
+ }
+
+ /**
+     * Gets a variable number of parameter values from audio hardware.
+ *
+ * @param keys list of parameters
+ * @return list of parameters key value pairs in the form:
+ * key1=value1;key2=value2;...
+ */
+ public String getParameters(String keys) {
+ return AudioSystem.getParameters(keys);
}
/* Sound effect identifiers */
@@ -1082,31 +1104,4 @@ public class AudioManager {
* {@hide}
*/
private IBinder mICallBack = new Binder();
-
- /**
- * {@hide}
- */
- private void setRoutingP(int mode, int routes, int mask) {
- IAudioService service = getService();
- try {
- service.setRouting(mode, routes, mask);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in setRouting", e);
- }
- }
-
-
- /**
- * {@hide}
- */
- private int getRoutingP(int mode) {
- IAudioService service = getService();
- try {
- return service.getRouting(mode);
- } catch (RemoteException e) {
- Log.e(TAG, "Dead object in getRouting", e);
- return -1;
- }
- }
-
}
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index 4d1535f..d96331f 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -86,7 +86,7 @@ public class AudioRecord
public static final int ERROR_INVALID_OPERATION = -3;
private static final int AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT = -16;
- private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELCOUNT = -17;
+ private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK = -17;
private static final int AUDIORECORD_ERROR_SETUP_INVALIDFORMAT = -18;
private static final int AUDIORECORD_ERROR_SETUP_INVALIDSOURCE = -19;
private static final int AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED = -20;
@@ -135,7 +135,7 @@ public class AudioRecord
/**
* The current audio channel configuration
*/
- private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ private int mChannelConfiguration = AudioFormat.CHANNEL_IN_MONO;
/**
* The encoding of the audio samples.
* @see AudioFormat#ENCODING_PCM_8BIT
@@ -193,8 +193,8 @@ public class AudioRecord
* @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but
* not limited to) 44100, 22050 and 11025.
* @param channelConfig describes the configuration of the audio channels.
- * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
- * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}
+ * See {@link AudioFormat#CHANNEL_IN_MONO} and
+ * {@link AudioFormat#CHANNEL_IN_STEREO}
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
@@ -265,18 +265,18 @@ public class AudioRecord
//--------------
// channel config
switch (channelConfig) {
- case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT:
- case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+ case AudioFormat.CHANNEL_IN_DEFAULT:
+ case AudioFormat.CHANNEL_IN_MONO:
mChannelCount = 1;
- mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ mChannelConfiguration = AudioFormat.CHANNEL_IN_MONO;
break;
- case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+ case AudioFormat.CHANNEL_IN_STEREO:
mChannelCount = 2;
- mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ mChannelConfiguration = AudioFormat.CHANNEL_IN_STEREO;
break;
default:
mChannelCount = 0;
- mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_INVALID;
+ mChannelConfiguration = AudioFormat.CHANNEL_INVALID;
throw (new IllegalArgumentException("Unsupported channel configuration."));
}
@@ -368,8 +368,8 @@ public class AudioRecord
/**
* Returns the configured channel configuration.
- * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO}
- * and {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}.
+ * See {@link AudioFormat#CHANNEL_IN_MONO}
+ * and {@link AudioFormat#CHANNEL_IN_STEREO}.
*/
public int getChannelConfiguration() {
return mChannelConfiguration;
@@ -425,8 +425,8 @@ public class AudioRecord
* will be polled for new data.
* @param sampleRateInHz the sample rate expressed in Hertz.
* @param channelConfig describes the configuration of the audio channels.
- * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
- * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}
+ * See {@link AudioFormat#CHANNEL_IN_MONO} and
+ * {@link AudioFormat#CHANNEL_IN_STEREO}
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT}.
* @return {@link #ERROR_BAD_VALUE} if the recording parameters are not supported by the
@@ -438,14 +438,14 @@ public class AudioRecord
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
int channelCount = 0;
switch(channelConfig) {
- case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT:
- case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+ case AudioFormat.CHANNEL_IN_DEFAULT:
+ case AudioFormat.CHANNEL_IN_MONO:
channelCount = 1;
break;
- case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+ case AudioFormat.CHANNEL_IN_STEREO:
channelCount = 2;
break;
- case AudioFormat.CHANNEL_CONFIGURATION_INVALID:
+ case AudioFormat.CHANNEL_INVALID:
default:
loge("getMinBufferSize(): Invalid channel configuration.");
return AudioRecord.ERROR_BAD_VALUE;
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 58c04f3..177a7cf 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -20,6 +20,12 @@ import android.app.ActivityManagerNative;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
+import android.content.IntentFilter;
+import android.bluetooth.BluetoothIntent;
+import android.content.BroadcastReceiver;
+import android.bluetooth.BluetoothHeadset;
+import android.bluetooth.BluetoothA2dp;
+
import android.content.pm.PackageManager;
import android.database.ContentObserver;
import android.media.MediaPlayer.OnCompletionListener;
@@ -94,16 +100,10 @@ public class AudioService extends IAudioService.Stub {
/** @see VolumeStreamState */
private VolumeStreamState[] mStreamStates;
private SettingsObserver mSettingsObserver;
-
- private boolean mMicMute;
+
private int mMode;
- private int[] mRoutes = new int[AudioSystem.NUM_MODES];
private Object mSettingsLock = new Object();
private boolean mMediaServerOk;
- private boolean mSpeakerIsOn;
- private boolean mBluetoothScoIsConnected;
- private boolean mHeadsetIsConnected;
- private boolean mBluetoothA2dpIsConnected;
private SoundPool mSoundPool;
private Object mSoundEffectsLock = new Object();
@@ -135,6 +135,23 @@ public class AudioService extends IAudioService.Stub {
{4, -1} // FX_FOCUS_RETURN
};
+ /* STREAM_VOLUME_ALIAS[] indicates for each stream if it uses the volume settings
+ * of another stream: This avoids multiplying the volume settings for hidden
+ * stream types that follow other stream behavior for volume settings
+ * NOTE: do not create loops in aliases! */
+ private int[] STREAM_VOLUME_ALIAS = new int[] {
+ AudioSystem.STREAM_VOICE_CALL, // STREAM_VOICE_CALL
+ AudioSystem.STREAM_SYSTEM, // STREAM_SYSTEM
+ AudioSystem.STREAM_RING, // STREAM_RING
+ AudioSystem.STREAM_MUSIC, // STREAM_MUSIC
+ AudioSystem.STREAM_ALARM, // STREAM_ALARM
+ AudioSystem.STREAM_NOTIFICATION, // STREAM_NOTIFICATION
+ AudioSystem.STREAM_VOICE_CALL, // STREAM_BLUETOOTH_SCO
+ AudioSystem.STREAM_SYSTEM, // STREAM_SYSTEM_ENFORCED
+ AudioSystem.STREAM_VOICE_CALL, // STREAM_DTMF
+ AudioSystem.STREAM_MUSIC // STREAM_TTS
+ };
+
private AudioSystem.ErrorCallback mAudioSystemCallback = new AudioSystem.ErrorCallback() {
public void onError(int error) {
switch (error) {
@@ -178,6 +195,21 @@ public class AudioService extends IAudioService.Stub {
*/
private int mVibrateSetting;
+ /** @see System#NOTIFICATIONS_USE_RING_VOLUME */
+ private int mNotificationsUseRingVolume;
+
+ // Broadcast receiver for device connections intent broadcasts
+ private final BroadcastReceiver mReceiver = new AudioServiceBroadcastReceiver();
+
+ //TODO: use common definitions with HeadsetObserver
+ private static final int BIT_HEADSET = (1 << 0);
+ private static final int BIT_HEADSET_NO_MIC = (1 << 1);
+ private static final int BIT_TTY = (1 << 2);
+ private static final int BIT_FM_HEADSET = (1 << 3);
+ private static final int BIT_FM_SPEAKER = (1 << 4);
+
+ private int mHeadsetState;
+
///////////////////////////////////////////////////////////////////////////
// Construction
///////////////////////////////////////////////////////////////////////////
@@ -188,18 +220,20 @@ public class AudioService extends IAudioService.Stub {
mContentResolver = context.getContentResolver();
mVolumePanel = new VolumePanel(context, this);
mSettingsObserver = new SettingsObserver();
-
+
createAudioSystemThread();
- createStreamStates();
readPersistedSettings();
- readAudioSettings();
+ createStreamStates();
mMediaServerOk = true;
AudioSystem.setErrorCallback(mAudioSystemCallback);
loadSoundEffects();
- mSpeakerIsOn = false;
- mBluetoothScoIsConnected = false;
- mHeadsetIsConnected = false;
- mBluetoothA2dpIsConnected = false;
+
+ // Register for device connection intent broadcasts.
+ IntentFilter intentFilter =
+ new IntentFilter(Intent.ACTION_HEADSET_PLUG);
+ intentFilter.addAction(BluetoothA2dp.SINK_STATE_CHANGED_ACTION);
+ intentFilter.addAction(BluetoothIntent.HEADSET_AUDIO_STATE_CHANGED_ACTION);
+ context.registerReceiver(mReceiver, intentFilter);
}
private void createAudioSystemThread() {
@@ -223,63 +257,23 @@ public class AudioService extends IAudioService.Stub {
}
private void createStreamStates() {
- final int[] volumeLevelsPhone =
- createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_VOICE_CALL]);
- final int[] volumeLevelsCoarse =
- createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_SYSTEM]);
- final int[] volumeLevelsFine =
- createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_MUSIC]);
- final int[] volumeLevelsBtPhone =
- createVolumeLevels(0,
- AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_BLUETOOTH_SCO]);
-
int numStreamTypes = AudioSystem.getNumStreamTypes();
VolumeStreamState[] streams = mStreamStates = new VolumeStreamState[numStreamTypes];
for (int i = 0; i < numStreamTypes; i++) {
- final int[] levels;
-
- switch (i) {
-
- case AudioSystem.STREAM_MUSIC:
- levels = volumeLevelsFine;
- break;
-
- case AudioSystem.STREAM_VOICE_CALL:
- levels = volumeLevelsPhone;
- break;
-
- case AudioSystem.STREAM_BLUETOOTH_SCO:
- levels = volumeLevelsBtPhone;
- break;
-
- default:
- levels = volumeLevelsCoarse;
- break;
- }
-
- if (i == AudioSystem.STREAM_BLUETOOTH_SCO) {
- streams[i] = new VolumeStreamState(AudioManager.DEFAULT_STREAM_VOLUME[i], i,levels);
- } else {
- streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[i], i, levels);
- }
+ streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[STREAM_VOLUME_ALIAS[i]], i);
}
- }
- private static int[] createVolumeLevels(int offset, int numlevels) {
- double curve = 1.0f; // 1.4f
- int [] volumes = new int[numlevels + offset];
- for (int i = 0; i < offset; i++) {
- volumes[i] = 0;
- }
-
- double val = 0;
- double max = Math.pow(numlevels - 1, curve);
- for (int i = 0; i < numlevels; i++) {
- val = Math.pow(i, curve) / max;
- volumes[offset + i] = (int) (val * 100.0f);
+ // Correct stream index values for streams with aliases
+ for (int i = 0; i < numStreamTypes; i++) {
+ if (STREAM_VOLUME_ALIAS[i] != i) {
+ int index = rescaleIndex(streams[i].mIndex, STREAM_VOLUME_ALIAS[i], i);
+ streams[i].mIndex = streams[i].getValidIndex(index);
+ setStreamVolumeIndex(i, index);
+ index = rescaleIndex(streams[i].mLastAudibleIndex, STREAM_VOLUME_ALIAS[i], i);
+ streams[i].mLastAudibleIndex = streams[i].getValidIndex(index);
+ }
}
- return volumes;
}
private void readPersistedSettings() {
@@ -291,12 +285,19 @@ public class AudioService extends IAudioService.Stub {
mRingerModeAffectedStreams = Settings.System.getInt(cr,
Settings.System.MODE_RINGER_STREAMS_AFFECTED,
- ((1 << AudioManager.STREAM_RING)|(1 << AudioManager.STREAM_NOTIFICATION)|(1 << AudioManager.STREAM_SYSTEM)));
+ ((1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_NOTIFICATION)|
+ (1 << AudioSystem.STREAM_SYSTEM)|(1 << AudioSystem.STREAM_SYSTEM_ENFORCED)));
mMuteAffectedStreams = System.getInt(cr,
System.MUTE_STREAMS_AFFECTED,
((1 << AudioSystem.STREAM_MUSIC)|(1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_SYSTEM)));
+ mNotificationsUseRingVolume = System.getInt(cr,
+ Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1);
+
+ if (mNotificationsUseRingVolume == 1) {
+ STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING;
+ }
// Each stream will read its own persisted settings
// Broadcast the sticky intent
@@ -307,25 +308,13 @@ public class AudioService extends IAudioService.Stub {
broadcastVibrateSetting(AudioManager.VIBRATE_TYPE_NOTIFICATION);
}
- private void readAudioSettings() {
- synchronized (mSettingsLock) {
- mMicMute = AudioSystem.isMicrophoneMuted();
- mMode = AudioSystem.getMode();
- for (int mode = 0; mode < AudioSystem.NUM_MODES; mode++) {
- mRoutes[mode] = AudioSystem.getRouting(mode);
- }
- }
+ private void setStreamVolumeIndex(int stream, int index) {
+ AudioSystem.setStreamVolumeIndex(stream, (index + 5)/10);
}
- private void applyAudioSettings() {
- synchronized (mSettingsLock) {
- AudioSystem.muteMicrophone(mMicMute);
- AudioSystem.setMode(mMode);
- for (int mode = 0; mode < AudioSystem.NUM_MODES; mode++) {
- AudioSystem.setRouting(mode, mRoutes[mode], AudioSystem.ROUTE_ALL);
- }
- }
- }
+ private int rescaleIndex(int index, int srcStream, int dstStream) {
+ return (index * mStreamStates[dstStream].getMaxIndex() + mStreamStates[srcStream].getMaxIndex() / 2) / mStreamStates[srcStream].getMaxIndex();
+ }
///////////////////////////////////////////////////////////////////////////
// IPC methods
@@ -354,44 +343,26 @@ public class AudioService extends IAudioService.Stub {
ensureValidDirection(direction);
ensureValidStreamType(streamType);
- boolean notificationsUseRingVolume = Settings.System.getInt(mContentResolver,
- Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1) == 1;
- if (notificationsUseRingVolume && streamType == AudioManager.STREAM_NOTIFICATION) {
- // Redirect the volume change to the ring stream
- streamType = AudioManager.STREAM_RING;
- }
- VolumeStreamState streamState = mStreamStates[streamType];
+ VolumeStreamState streamState = mStreamStates[STREAM_VOLUME_ALIAS[streamType]];
final int oldIndex = streamState.mIndex;
boolean adjustVolume = true;
// If either the client forces allowing ringer modes for this adjustment,
// or the stream type is one that is affected by ringer modes
if ((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0
- || streamType == AudioManager.STREAM_RING) {
+ || streamType == AudioSystem.STREAM_RING) {
// Check if the ringer mode changes with this volume adjustment. If
// it does, it will handle adjusting the volume, so we won't below
adjustVolume = checkForRingerModeChange(oldIndex, direction);
}
if (adjustVolume && streamState.adjustIndex(direction)) {
-
- boolean alsoUpdateNotificationVolume = notificationsUseRingVolume &&
- streamType == AudioManager.STREAM_RING;
- if (alsoUpdateNotificationVolume) {
- mStreamStates[AudioManager.STREAM_NOTIFICATION].adjustIndex(direction);
- }
-
// Post message to set system volume (it in turn will post a message
// to persist). Do not change volume if stream is muted.
if (streamState.muteCount() == 0) {
- sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, streamType, SENDMSG_NOOP, 0, 0,
+ sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, STREAM_VOLUME_ALIAS[streamType], SENDMSG_NOOP, 0, 0,
streamState, 0);
-
- if (alsoUpdateNotificationVolume) {
- sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, AudioManager.STREAM_NOTIFICATION,
- SENDMSG_NOOP, 0, 0, mStreamStates[AudioManager.STREAM_NOTIFICATION], 0);
- }
}
}
@@ -404,9 +375,8 @@ public class AudioService extends IAudioService.Stub {
/** @see AudioManager#setStreamVolume(int, int, int) */
public void setStreamVolume(int streamType, int index, int flags) {
ensureValidStreamType(streamType);
- syncRingerAndNotificationStreamVolume(streamType, index, false);
-
- setStreamVolumeInt(streamType, index, false, true);
+ index = rescaleIndex(index * 10, streamType, STREAM_VOLUME_ALIAS[streamType]);
+ setStreamVolumeInt(STREAM_VOLUME_ALIAS[streamType], index, false, true);
// UI, etc.
mVolumePanel.postVolumeChanged(streamType, flags);
@@ -420,37 +390,12 @@ public class AudioService extends IAudioService.Stub {
intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_VALUE, getStreamVolume(streamType));
// Currently, sending the intent only when the stream is BLUETOOTH_SCO
- if (streamType == AudioManager.STREAM_BLUETOOTH_SCO) {
+ if (streamType == AudioSystem.STREAM_BLUETOOTH_SCO) {
mContext.sendBroadcast(intent);
}
}
/**
- * Sync the STREAM_RING and STREAM_NOTIFICATION volumes if mandated by the
- * value in Settings.
- *
- * @param streamType Type of the stream
- * @param index Volume index for the stream
- * @param force If true, set the volume even if the current and desired
- * volume as same
- */
- private void syncRingerAndNotificationStreamVolume(int streamType, int index, boolean force) {
- boolean notificationsUseRingVolume = Settings.System.getInt(mContentResolver,
- Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1) == 1;
- if (notificationsUseRingVolume) {
- if (streamType == AudioManager.STREAM_NOTIFICATION) {
- // Redirect the volume change to the ring stream
- streamType = AudioManager.STREAM_RING;
- }
- if (streamType == AudioManager.STREAM_RING) {
- // One-off to sync notification volume to ringer volume
- setStreamVolumeInt(AudioManager.STREAM_NOTIFICATION, index, force, true);
- }
- }
- }
-
-
- /**
* Sets the stream state's index, and posts a message to set system volume.
* This will not call out to the UI. Assumes a valid stream type.
*
@@ -491,13 +436,13 @@ public class AudioService extends IAudioService.Stub {
/** @see AudioManager#getStreamVolume(int) */
public int getStreamVolume(int streamType) {
ensureValidStreamType(streamType);
- return mStreamStates[streamType].mIndex;
+ return (mStreamStates[streamType].mIndex + 5) / 10;
}
/** @see AudioManager#getStreamMaxVolume(int) */
public int getStreamMaxVolume(int streamType) {
ensureValidStreamType(streamType);
- return mStreamStates[streamType].getMaxIndex();
+ return (mStreamStates[streamType].getMaxIndex() + 5) / 10;
}
/** @see AudioManager#getRingerMode() */
@@ -507,11 +452,12 @@ public class AudioService extends IAudioService.Stub {
/** @see AudioManager#setRingerMode(int) */
public void setRingerMode(int ringerMode) {
- if (ringerMode != mRingerMode) {
- setRingerModeInt(ringerMode, true);
-
- // Send sticky broadcast
- broadcastRingerMode();
+ synchronized (mSettingsLock) {
+ if (ringerMode != mRingerMode) {
+ setRingerModeInt(ringerMode, true);
+ // Send sticky broadcast
+ broadcastRingerMode();
+ }
}
}
@@ -541,7 +487,7 @@ public class AudioService extends IAudioService.Stub {
}
}
}
-
+
// Post a persist ringer mode msg
if (persist) {
sendMsg(mAudioHandler, MSG_PERSIST_RINGER_MODE, SHARED_MSG,
@@ -606,39 +552,28 @@ public class AudioService extends IAudioService.Stub {
return existingValue;
}
- /** @see AudioManager#setMicrophoneMute(boolean) */
- public void setMicrophoneMute(boolean on) {
- if (!checkAudioSettingsPermission("setMicrophoneMute()")) {
- return;
- }
- synchronized (mSettingsLock) {
- if (on != mMicMute) {
- AudioSystem.muteMicrophone(on);
- mMicMute = on;
- }
- }
- }
-
- /** @see AudioManager#isMicrophoneMute() */
- public boolean isMicrophoneMute() {
- return mMicMute;
- }
-
/** @see AudioManager#setMode(int) */
public void setMode(int mode) {
if (!checkAudioSettingsPermission("setMode()")) {
return;
}
+
+ if (mode < AudioSystem.MODE_CURRENT || mode > AudioSystem.MODE_IN_CALL) {
+ return;
+ }
+
synchronized (mSettingsLock) {
+ if (mode == AudioSystem.MODE_CURRENT) {
+ mode = mMode;
+ }
if (mode != mMode) {
- if (AudioSystem.setMode(mode) == AudioSystem.AUDIO_STATUS_OK) {
+ if (AudioSystem.setPhoneState(mode) == AudioSystem.AUDIO_STATUS_OK) {
mMode = mode;
}
}
int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE);
- int index = mStreamStates[streamType].mIndex;
- syncRingerAndNotificationStreamVolume(streamType, index, true);
- setStreamVolumeInt(streamType, index, true, true);
+ int index = mStreamStates[STREAM_VOLUME_ALIAS[streamType]].mIndex;
+ setStreamVolumeInt(STREAM_VOLUME_ALIAS[streamType], index, true, true);
}
}
@@ -647,187 +582,6 @@ public class AudioService extends IAudioService.Stub {
return mMode;
}
- /** @see AudioManager#setRouting(int, int, int) */
- public void setRouting(int mode, int routes, int mask) {
- int incallMask = 0;
- int ringtoneMask = 0;
- int normalMask = 0;
-
- if (!checkAudioSettingsPermission("setRouting()")) {
- return;
- }
- synchronized (mSettingsLock) {
- // Temporary fix for issue #1713090 until audio routing is refactored in eclair release.
- // mode AudioSystem.MODE_INVALID is used only by the following AudioManager methods:
- // setWiredHeadsetOn(), setBluetoothA2dpOn(), setBluetoothScoOn() and setSpeakerphoneOn().
- // If applications are using AudioManager.setRouting() that is now deprecated, the routing
- // command will be ignored.
- if (mode == AudioSystem.MODE_INVALID) {
- switch (mask) {
- case AudioSystem.ROUTE_SPEAKER:
- // handle setSpeakerphoneOn()
- if (routes != 0 && !mSpeakerIsOn) {
- mSpeakerIsOn = true;
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER;
- incallMask = AudioSystem.ROUTE_ALL;
- } else if (routes == 0 && mSpeakerIsOn) {
- mSpeakerIsOn = false;
- if (mBluetoothScoIsConnected) {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO;
- } else if (mHeadsetIsConnected) {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET;
- } else {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE;
- }
- incallMask = AudioSystem.ROUTE_ALL;
- }
- break;
-
- case AudioSystem.ROUTE_BLUETOOTH_SCO:
- // handle setBluetoothScoOn()
- if (routes != 0 && !mBluetoothScoIsConnected) {
- mBluetoothScoIsConnected = true;
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO;
- mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_BLUETOOTH_SCO;
- mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_BLUETOOTH_SCO;
- incallMask = AudioSystem.ROUTE_ALL;
- // A2DP has higher priority than SCO headset, so headset connect/disconnect events
- // should not affect A2DP routing
- ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- } else if (routes == 0 && mBluetoothScoIsConnected) {
- mBluetoothScoIsConnected = false;
- if (mHeadsetIsConnected) {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET;
- mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER);
- mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_HEADSET;
- } else {
- if (mSpeakerIsOn) {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER;
- } else {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE;
- }
- mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_SPEAKER;
- mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_SPEAKER;
- }
- incallMask = AudioSystem.ROUTE_ALL;
- // A2DP has higher priority than SCO headset, so headset connect/disconnect events
- // should not affect A2DP routing
- ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- }
- break;
-
- case AudioSystem.ROUTE_HEADSET:
- // handle setWiredHeadsetOn()
- if (routes != 0 && !mHeadsetIsConnected) {
- mHeadsetIsConnected = true;
- // do not act upon headset connection if bluetooth SCO is connected to match phone app behavior
- if (!mBluetoothScoIsConnected) {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET;
- mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER);
- mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_HEADSET;
- incallMask = AudioSystem.ROUTE_ALL;
- // A2DP has higher priority than wired headset, so headset connect/disconnect events
- // should not affect A2DP routing
- ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- }
- } else if (routes == 0 && mHeadsetIsConnected) {
- mHeadsetIsConnected = false;
- // do not act upon headset disconnection if bluetooth SCO is connected to match phone app behavior
- if (!mBluetoothScoIsConnected) {
- if (mSpeakerIsOn) {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER;
- } else {
- mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE;
- }
- mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_SPEAKER;
- mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) |
- AudioSystem.ROUTE_SPEAKER;
-
- incallMask = AudioSystem.ROUTE_ALL;
- // A2DP has higher priority than wired headset, so headset connect/disconnect events
- // should not affect A2DP routing
- ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- }
- }
- break;
-
- case AudioSystem.ROUTE_BLUETOOTH_A2DP:
- // handle setBluetoothA2dpOn()
- if (routes != 0 && !mBluetoothA2dpIsConnected) {
- mBluetoothA2dpIsConnected = true;
- mRoutes[AudioSystem.MODE_RINGTONE] |= AudioSystem.ROUTE_BLUETOOTH_A2DP;
- mRoutes[AudioSystem.MODE_NORMAL] |= AudioSystem.ROUTE_BLUETOOTH_A2DP;
- // the audio flinger chooses A2DP as a higher priority,
- // so there is no need to disable other routes.
- ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
- normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
- } else if (routes == 0 && mBluetoothA2dpIsConnected) {
- mBluetoothA2dpIsConnected = false;
- mRoutes[AudioSystem.MODE_RINGTONE] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- mRoutes[AudioSystem.MODE_NORMAL] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP;
- // the audio flinger chooses A2DP as a higher priority,
- // so there is no need to disable other routes.
- ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
- normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP;
- }
- break;
- }
-
- // incallMask is != 0 means we must apply ne routing to MODE_IN_CALL mode
- if (incallMask != 0) {
- AudioSystem.setRouting(AudioSystem.MODE_IN_CALL,
- mRoutes[AudioSystem.MODE_IN_CALL],
- incallMask);
- }
- // ringtoneMask is != 0 means we must apply ne routing to MODE_RINGTONE mode
- if (ringtoneMask != 0) {
- AudioSystem.setRouting(AudioSystem.MODE_RINGTONE,
- mRoutes[AudioSystem.MODE_RINGTONE],
- ringtoneMask);
- }
- // normalMask is != 0 means we must apply ne routing to MODE_NORMAL mode
- if (normalMask != 0) {
- AudioSystem.setRouting(AudioSystem.MODE_NORMAL,
- mRoutes[AudioSystem.MODE_NORMAL],
- normalMask);
- }
-
- int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE);
- int index = mStreamStates[streamType].mIndex;
- syncRingerAndNotificationStreamVolume(streamType, index, true);
- setStreamVolumeInt(streamType, index, true, true);
- }
- }
- }
-
- /** @see AudioManager#getRouting(int) */
- public int getRouting(int mode) {
- return mRoutes[mode];
- }
-
- /** @see AudioManager#isMusicActive() */
- public boolean isMusicActive() {
- return AudioSystem.isMusicActive();
- }
-
- /** @see AudioManager#setParameter(String, String) */
- public void setParameter(String key, String value) {
- AudioSystem.setParameter(key, value);
- }
-
/** @see AudioManager#playSoundEffect(int) */
public void playSoundEffect(int effectType) {
sendMsg(mAudioHandler, MSG_PLAY_SOUND_EFFECT, SHARED_MSG, SENDMSG_NOOP,
@@ -930,14 +684,28 @@ public class AudioService extends IAudioService.Stub {
if (streamType != AudioSystem.STREAM_BLUETOOTH_SCO) {
String settingName = System.VOLUME_SETTINGS[streamType];
String lastAudibleSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE;
-
- streamState.mIndex = streamState.getValidIndex(Settings.System.getInt(mContentResolver,
- settingName,
- AudioManager.DEFAULT_STREAM_VOLUME[streamType]));
- streamState.mLastAudibleIndex = streamState.getValidIndex(Settings.System.getInt(mContentResolver,
- lastAudibleSettingName,
- streamState.mIndex > 0 ? streamState.mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType]));
+ int index = Settings.System.getInt(mContentResolver,
+ settingName,
+ AudioManager.DEFAULT_STREAM_VOLUME[streamType]);
+ if (STREAM_VOLUME_ALIAS[streamType] != streamType) {
+ index = rescaleIndex(index * 10, STREAM_VOLUME_ALIAS[streamType], streamType);
+ } else {
+ index *= 10;
+ }
+ streamState.mIndex = streamState.getValidIndex(index);
+
+ index = (index + 5) / 10;
+ index = Settings.System.getInt(mContentResolver,
+ lastAudibleSettingName,
+ (index > 0) ? index : AudioManager.DEFAULT_STREAM_VOLUME[streamType]);
+ if (STREAM_VOLUME_ALIAS[streamType] != streamType) {
+ index = rescaleIndex(index * 10, STREAM_VOLUME_ALIAS[streamType], streamType);
+ } else {
+ index *= 10;
+ }
+ streamState.mLastAudibleIndex = streamState.getValidIndex(index);
}
+
// unmute stream that whas muted but is not affect by mute anymore
if (streamState.muteCount() != 0 && !isStreamAffectedByMute(streamType)) {
int size = streamState.mDeathHandlers.size();
@@ -948,7 +716,7 @@ public class AudioService extends IAudioService.Stub {
}
// apply stream volume
if (streamState.muteCount() == 0) {
- AudioSystem.setVolume(streamType, streamState.mVolumes[streamState.mIndex]);
+ setStreamVolumeIndex(streamType, streamState.mIndex);
}
}
@@ -969,7 +737,7 @@ public class AudioService extends IAudioService.Stub {
boolean adjustVolumeIndex = true;
int newRingerMode = mRingerMode;
- if (mRingerMode == AudioManager.RINGER_MODE_NORMAL && oldIndex == 1
+ if (mRingerMode == AudioManager.RINGER_MODE_NORMAL && (oldIndex + 5) / 10 == 1
&& direction == AudioManager.ADJUST_LOWER) {
newRingerMode = AudioManager.RINGER_MODE_VIBRATE;
} else if (mRingerMode == AudioManager.RINGER_MODE_VIBRATE) {
@@ -1026,7 +794,7 @@ public class AudioService extends IAudioService.Stub {
Log.w(TAG, "Couldn't connect to phone service", e);
}
- if ((getRouting(AudioSystem.MODE_IN_CALL) & AudioSystem.ROUTE_BLUETOOTH_SCO) != 0) {
+ if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION) == AudioSystem.FORCE_BT_SCO) {
// Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO...");
return AudioSystem.STREAM_BLUETOOTH_SCO;
} else if (isOffhook) {
@@ -1110,47 +878,36 @@ public class AudioService extends IAudioService.Stub {
private final String mLastAudibleVolumeIndexSettingName;
private final int mStreamType;
- private final int[] mVolumes;
+ private int mIndexMax;
private int mIndex;
private int mLastAudibleIndex;
private ArrayList<VolumeDeathHandler> mDeathHandlers; //handles mute/solo requests client death
- private VolumeStreamState(String settingName, int streamType, int[] volumes) {
+ private VolumeStreamState(String settingName, int streamType) {
mVolumeIndexSettingName = settingName;
mLastAudibleVolumeIndexSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE;
mStreamType = streamType;
- mVolumes = volumes;
final ContentResolver cr = mContentResolver;
- mIndex = getValidIndex(Settings.System.getInt(cr, mVolumeIndexSettingName, AudioManager.DEFAULT_STREAM_VOLUME[streamType]));
- mLastAudibleIndex = getValidIndex(Settings.System.getInt(cr,
- mLastAudibleVolumeIndexSettingName, mIndex > 0 ? mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType]));
-
- AudioSystem.setVolume(streamType, volumes[mIndex]);
- mDeathHandlers = new ArrayList<VolumeDeathHandler>();
- }
-
- /**
- * Constructor to be used when there is no setting associated with the VolumeStreamState.
- *
- * @param defaultVolume Default volume of the stream to use.
- * @param streamType Type of the stream.
- * @param volumes Volumes levels associated with this stream.
- */
- private VolumeStreamState(int defaultVolume, int streamType, int[] volumes) {
- mVolumeIndexSettingName = null;
- mLastAudibleVolumeIndexSettingName = null;
- mIndex = mLastAudibleIndex = defaultVolume;
- mStreamType = streamType;
- mVolumes = volumes;
- AudioSystem.setVolume(mStreamType, defaultVolume);
+ mIndexMax = AudioManager.MAX_STREAM_VOLUME[streamType];
+ mIndex = Settings.System.getInt(cr,
+ mVolumeIndexSettingName,
+ AudioManager.DEFAULT_STREAM_VOLUME[streamType]);
+ mLastAudibleIndex = Settings.System.getInt(cr,
+ mLastAudibleVolumeIndexSettingName,
+ (mIndex > 0) ? mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType]);
+ AudioSystem.initStreamVolume(streamType, 0, mIndexMax);
+ mIndexMax *= 10;
+ mIndex = getValidIndex(10 * mIndex);
+ mLastAudibleIndex = getValidIndex(10 * mLastAudibleIndex);
+ setStreamVolumeIndex(streamType, mIndex);
mDeathHandlers = new ArrayList<VolumeDeathHandler>();
}
public boolean adjustIndex(int deltaIndex) {
- return setIndex(mIndex + deltaIndex, true);
+ return setIndex(mIndex + deltaIndex * 10, true);
}
public boolean setIndex(int index, boolean lastAudible) {
@@ -1161,6 +918,13 @@ public class AudioService extends IAudioService.Stub {
if (lastAudible) {
mLastAudibleIndex = mIndex;
}
+ // Apply change to all streams using this one as alias
+ int numStreamTypes = AudioSystem.getNumStreamTypes();
+ for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
+ if (streamType != mStreamType && STREAM_VOLUME_ALIAS[streamType] == mStreamType) {
+ mStreamStates[streamType].setIndex(rescaleIndex(mIndex, mStreamType, streamType), lastAudible);
+ }
+ }
return true;
} else {
return false;
@@ -1168,7 +932,7 @@ public class AudioService extends IAudioService.Stub {
}
public int getMaxIndex() {
- return mVolumes.length - 1;
+ return mIndexMax;
}
public void mute(IBinder cb, boolean state) {
@@ -1183,8 +947,8 @@ public class AudioService extends IAudioService.Stub {
private int getValidIndex(int index) {
if (index < 0) {
return 0;
- } else if (index >= mVolumes.length) {
- return mVolumes.length - 1;
+ } else if (index > mIndexMax) {
+ return mIndexMax;
}
return index;
@@ -1318,8 +1082,16 @@ public class AudioService extends IAudioService.Stub {
private void setSystemVolume(VolumeStreamState streamState) {
// Adjust volume
- AudioSystem
- .setVolume(streamState.mStreamType, streamState.mVolumes[streamState.mIndex]);
+ setStreamVolumeIndex(streamState.mStreamType, streamState.mIndex);
+
+ // Apply change to all streams using this one as alias
+ int numStreamTypes = AudioSystem.getNumStreamTypes();
+ for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
+ if (streamType != streamState.mStreamType &&
+ STREAM_VOLUME_ALIAS[streamType] == streamState.mStreamType) {
+ setStreamVolumeIndex(streamType, mStreamStates[streamType].mIndex);
+ }
+ }
// Post a persist volume msg
sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, streamState.mStreamType,
@@ -1327,12 +1099,10 @@ public class AudioService extends IAudioService.Stub {
}
private void persistVolume(VolumeStreamState streamState) {
- if (streamState.mStreamType != AudioManager.STREAM_BLUETOOTH_SCO) {
- System.putInt(mContentResolver, streamState.mVolumeIndexSettingName,
- streamState.mIndex);
- System.putInt(mContentResolver, streamState.mLastAudibleVolumeIndexSettingName,
- streamState.mLastAudibleIndex);
- }
+ System.putInt(mContentResolver, streamState.mVolumeIndexSettingName,
+ (streamState.mIndex + 5)/ 10);
+ System.putInt(mContentResolver, streamState.mLastAudibleVolumeIndexSettingName,
+ (streamState.mLastAudibleIndex + 5) / 10);
}
private void persistRingerMode() {
@@ -1426,18 +1196,17 @@ public class AudioService extends IAudioService.Stub {
case MSG_MEDIA_SERVER_STARTED:
Log.e(TAG, "Media server started.");
- // Restore audio routing and stream volumes
- applyAudioSettings();
+ // Restore stream volumes
int numStreamTypes = AudioSystem.getNumStreamTypes();
for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) {
- int volume;
+ int index;
VolumeStreamState streamState = mStreamStates[streamType];
if (streamState.muteCount() == 0) {
- volume = streamState.mVolumes[streamState.mIndex];
+ index = streamState.mIndex;
} else {
- volume = streamState.mVolumes[0];
+ index = 0;
}
- AudioSystem.setVolume(streamType, volume);
+ setStreamVolumeIndex(streamType, index);
}
setRingerMode(mRingerMode);
mMediaServerOk = true;
@@ -1451,28 +1220,144 @@ public class AudioService extends IAudioService.Stub {
}
private class SettingsObserver extends ContentObserver {
-
+
SettingsObserver() {
super(new Handler());
mContentResolver.registerContentObserver(Settings.System.getUriFor(
Settings.System.MODE_RINGER_STREAMS_AFFECTED), false, this);
+ mContentResolver.registerContentObserver(Settings.System.getUriFor(
+ Settings.System.NOTIFICATIONS_USE_RING_VOLUME), false, this);
}
@Override
public void onChange(boolean selfChange) {
super.onChange(selfChange);
-
- mRingerModeAffectedStreams = Settings.System.getInt(mContentResolver,
- Settings.System.MODE_RINGER_STREAMS_AFFECTED,
- 0);
+ synchronized (mSettingsLock) {
+ int ringerModeAffectedStreams = Settings.System.getInt(mContentResolver,
+ Settings.System.MODE_RINGER_STREAMS_AFFECTED,
+ 0);
+ if (ringerModeAffectedStreams != mRingerModeAffectedStreams) {
+ /*
+ * Ensure all stream types that should be affected by ringer mode
+ * are in the proper state.
+ */
+ mRingerModeAffectedStreams = ringerModeAffectedStreams;
+ setRingerModeInt(getRingerMode(), false);
+ }
- /*
- * Ensure all stream types that should be affected by ringer mode
- * are in the proper state.
- */
- setRingerModeInt(getRingerMode(), false);
+ int notificationsUseRingVolume = Settings.System.getInt(mContentResolver,
+ Settings.System.NOTIFICATIONS_USE_RING_VOLUME,
+ 1);
+ if (notificationsUseRingVolume != mNotificationsUseRingVolume) {
+ mNotificationsUseRingVolume = notificationsUseRingVolume;
+ if (mNotificationsUseRingVolume == 1) {
+ STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING;
+ } else {
+ STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_NOTIFICATION;
+ // Persist notification volume as it was not persisted while aliased to ring volume
+ sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, AudioSystem.STREAM_NOTIFICATION,
+ SENDMSG_REPLACE, 0, 0, mStreamStates[AudioSystem.STREAM_NOTIFICATION], PERSIST_DELAY);
+ }
+ }
+ }
}
-
}
-
+
+ /**
+ * Receiver for misc intent broadcasts the Phone app cares about.
+ */
+ private class AudioServiceBroadcastReceiver extends BroadcastReceiver {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ String action = intent.getAction();
+
+ if (action.equals(BluetoothA2dp.SINK_STATE_CHANGED_ACTION)) {
+ int state = intent.getIntExtra(BluetoothA2dp.SINK_STATE,
+ BluetoothA2dp.STATE_DISCONNECTED);
+ String address = intent.getStringExtra(BluetoothIntent.ADDRESS);
+ if (state == BluetoothA2dp.STATE_DISCONNECTED) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ address);
+ } else if (state == BluetoothA2dp.STATE_CONNECTED){
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ address);
+ }
+ } else if (action.equals(BluetoothIntent.HEADSET_AUDIO_STATE_CHANGED_ACTION)) {
+ int state = intent.getIntExtra(BluetoothIntent.HEADSET_AUDIO_STATE,
+ BluetoothHeadset.STATE_ERROR);
+ String address = intent.getStringExtra(BluetoothIntent.ADDRESS);
+ if (state == BluetoothHeadset.AUDIO_STATE_DISCONNECTED) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_SCO,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ address);
+ } else if (state == BluetoothHeadset.AUDIO_STATE_CONNECTED) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_SCO,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ address);
+ }
+ } else if (action.equals(Intent.ACTION_HEADSET_PLUG)) {
+ int state = intent.getIntExtra("state", 0);
+ if ((state & BIT_HEADSET) == 0 &&
+ (mHeadsetState & BIT_HEADSET) != 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ "");
+ } else if ((state & BIT_HEADSET) != 0 &&
+ (mHeadsetState & BIT_HEADSET) == 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ "");
+ }
+ if ((state & BIT_HEADSET_NO_MIC) == 0 &&
+ (mHeadsetState & BIT_HEADSET_NO_MIC) != 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ "");
+ } else if ((state & BIT_HEADSET_NO_MIC) != 0 &&
+ (mHeadsetState & BIT_HEADSET_NO_MIC) == 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ "");
+ }
+ if ((state & BIT_TTY) == 0 &&
+ (mHeadsetState & BIT_TTY) != 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_TTY,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ "");
+ } else if ((state & BIT_TTY) != 0 &&
+ (mHeadsetState & BIT_TTY) == 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_TTY,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ "");
+ }
+ if ((state & BIT_FM_HEADSET) == 0 &&
+ (mHeadsetState & BIT_FM_HEADSET) != 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_HEADPHONE,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ "");
+ } else if ((state & BIT_FM_HEADSET) != 0 &&
+ (mHeadsetState & BIT_FM_HEADSET) == 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_HEADPHONE,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ "");
+ }
+ if ((state & BIT_FM_SPEAKER) == 0 &&
+ (mHeadsetState & BIT_FM_SPEAKER) != 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_SPEAKER,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE,
+ "");
+ } else if ((state & BIT_FM_SPEAKER) != 0 &&
+ (mHeadsetState & BIT_FM_SPEAKER) == 0) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_SPEAKER,
+ AudioSystem.DEVICE_STATE_AVAILABLE,
+ "");
+ }
+ mHeadsetState = state;
+ }
+ }
+ }
+
+
}
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 5917ab9..d587f65 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -45,38 +45,21 @@ public class AudioSystem
public static final int STREAM_NOTIFICATION = 5;
/* @hide The audio stream for phone calls when connected on bluetooth */
public static final int STREAM_BLUETOOTH_SCO = 6;
+ /* @hide The audio stream for enforced system sounds in certain countries (e.g. camera in Japan) */
+ public static final int STREAM_SYSTEM_ENFORCED = 7;
+ /* @hide The audio stream for DTMF tones */
+ public static final int STREAM_DTMF = 8;
+ /* @hide The audio stream for text to speech (TTS) */
+ public static final int STREAM_TTS = 9;
/**
* @deprecated Use {@link #numStreamTypes() instead}
*/
public static final int NUM_STREAMS = 5;
// Expose only the getter method publicly so we can change it in the future
- private static final int NUM_STREAM_TYPES = 7;
+ private static final int NUM_STREAM_TYPES = 10;
public static final int getNumStreamTypes() { return NUM_STREAM_TYPES; }
- /* max and min volume levels */
- /* Maximum volume setting, for use with setVolume(int,int) */
- public static final int MAX_VOLUME = 100;
- /* Minimum volume setting, for use with setVolume(int,int) */
- public static final int MIN_VOLUME = 0;
-
- /*
- * Sets the volume of a specified audio stream.
- *
- * param type the stream type to set the volume of (e.g. STREAM_MUSIC)
- * param volume the volume level to set (0-100)
- * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR
- */
- public static native int setVolume(int type, int volume);
-
- /*
- * Returns the volume of a specified audio stream.
- *
- * param type the stream type to get the volume of (e.g. STREAM_MUSIC)
- * return the current volume (0-100)
- */
- public static native int getVolume(int type);
-
/*
* Sets the microphone mute on or off.
*
@@ -101,17 +84,22 @@ public class AudioSystem
* it can route the audio appropriately.
* return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR
*/
- public static native int setMode(int mode);
-
+ /** @deprecated use {@link #setPhoneState(int)} */
+ public static int setMode(int mode) {
+ return AUDIO_STATUS_ERROR;
+ }
/*
* Returns the current audio mode.
*
* return the current audio mode (NORMAL, RINGTONE, or IN_CALL).
* Returns the current current audio state from the HAL.
*/
- public static native int getMode();
+ /** @deprecated */
+ public static int getMode() {
+ return MODE_INVALID;
+ }
- /* modes for setMode/getMode/setRoute/getRoute */
+ /* modes for setPhoneState */
public static final int MODE_INVALID = -2;
public static final int MODE_CURRENT = -1;
public static final int MODE_NORMAL = 0;
@@ -121,15 +109,20 @@ public class AudioSystem
/* Routing bits for setRouting/getRouting API */
- public static final int ROUTE_EARPIECE = (1 << 0);
- public static final int ROUTE_SPEAKER = (1 << 1);
-
+ /** @deprecated */
+ @Deprecated public static final int ROUTE_EARPIECE = (1 << 0);
+ /** @deprecated */
+ @Deprecated public static final int ROUTE_SPEAKER = (1 << 1);
/** @deprecated use {@link #ROUTE_BLUETOOTH_SCO} */
@Deprecated public static final int ROUTE_BLUETOOTH = (1 << 2);
- public static final int ROUTE_BLUETOOTH_SCO = (1 << 2);
- public static final int ROUTE_HEADSET = (1 << 3);
- public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4);
- public static final int ROUTE_ALL = 0xFFFFFFFF;
+ /** @deprecated */
+ @Deprecated public static final int ROUTE_BLUETOOTH_SCO = (1 << 2);
+ /** @deprecated */
+ @Deprecated public static final int ROUTE_HEADSET = (1 << 3);
+ /** @deprecated */
+ @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4);
+ /** @deprecated */
+ @Deprecated public static final int ROUTE_ALL = 0xFFFFFFFF;
/*
* Sets the audio routing for a specified mode
@@ -141,7 +134,10 @@ public class AudioSystem
* ROUTE_xxx types. Unset bits indicate the route should be left unchanged
* return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR
*/
- public static native int setRouting(int mode, int routes, int mask);
+ /** @deprecated use {@link #setDeviceConnectionState(int,int,String)} */
+ public static int setRouting(int mode, int routes, int mask) {
+ return AUDIO_STATUS_ERROR;
+ }
/*
* Returns the current audio routing bit vector for a specified mode.
@@ -150,7 +146,10 @@ public class AudioSystem
* return an audio route bit vector that can be compared with ROUTE_xxx
* bits
*/
- public static native int getRouting(int mode);
+ /** @deprecated use {@link #getDeviceConnectionState(int,String)} */
+ public static int getRouting(int mode) {
+ return 0;
+ }
/*
* Checks whether any music is active.
@@ -160,17 +159,23 @@ public class AudioSystem
public static native boolean isMusicActive();
/*
- * Sets a generic audio configuration parameter. The use of these parameters
+     * Sets a group of generic audio configuration parameters. The use of these parameters
+     * is platform dependent; see libaudio
*
- * ** Temporary interface - DO NOT USE
- *
- * TODO: Replace with a more generic key:value get/set mechanism
+ * param keyValuePairs list of parameters key value pairs in the form:
+ * key1=value1;key2=value2;...
+ */
+ public static native int setParameters(String keyValuePairs);
+
+ /*
+     * Gets a group of generic audio configuration parameters. The use of these parameters
+     * is platform dependent; see libaudio
*
- * param key name of parameter to set. Must not be null.
- * param value value of parameter. Must not be null.
+ * param keys list of parameters
+ * return value: list of parameters key value pairs in the form:
+ * key1=value1;key2=value2;...
*/
- public static native void setParameter(String key, String value);
+ public static native String getParameters(String keys);
/*
private final static String TAG = "audio";
@@ -220,4 +225,68 @@ public class AudioSystem
mErrorCallback.onError(error);
}
}
+
+ /*
+ * AudioPolicyService methods
+ */
+
+ // output devices
+ public static final int DEVICE_OUT_EARPIECE = 0x1;
+ public static final int DEVICE_OUT_SPEAKER = 0x2;
+ public static final int DEVICE_OUT_WIRED_HEADSET = 0x4;
+ public static final int DEVICE_OUT_WIRED_HEADPHONE = 0x8;
+ public static final int DEVICE_OUT_BLUETOOTH_SCO = 0x10;
+ public static final int DEVICE_OUT_BLUETOOTH_SCO_HEADSET = 0x20;
+ public static final int DEVICE_OUT_BLUETOOTH_SCO_CARKIT = 0x40;
+ public static final int DEVICE_OUT_BLUETOOTH_A2DP = 0x80;
+ public static final int DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100;
+ public static final int DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200;
+ public static final int DEVICE_OUT_AUX_DIGITAL = 0x400;
+ public static final int DEVICE_OUT_FM_HEADPHONE = 0x800;
+ public static final int DEVICE_OUT_FM_SPEAKER = 0x1000;
+ public static final int DEVICE_OUT_TTY = 0x2000;
+ public static final int DEVICE_OUT_DEFAULT = 0x8000;
+ // input devices
+ public static final int DEVICE_IN_COMMUNICATION = 0x10000;
+ public static final int DEVICE_IN_AMBIENT = 0x20000;
+ public static final int DEVICE_IN_BUILTIN_MIC1 = 0x40000;
+ public static final int DEVICE_IN_BUILTIN_MIC2 = 0x80000;
+ public static final int DEVICE_IN_MIC_ARRAY = 0x100000;
+ public static final int DEVICE_IN_BLUETOOTH_SCO_HEADSET = 0x200000;
+ public static final int DEVICE_IN_WIRED_HEADSET = 0x400000;
+ public static final int DEVICE_IN_AUX_DIGITAL = 0x800000;
+ public static final int DEVICE_IN_DEFAULT = 0x80000000;
+
+ // device states
+ public static final int DEVICE_STATE_UNAVAILABLE = 0;
+ public static final int DEVICE_STATE_AVAILABLE = 1;
+
+ // phone state
+ public static final int PHONE_STATE_OFFCALL = 0;
+ public static final int PHONE_STATE_RINGING = 1;
+ public static final int PHONE_STATE_INCALL = 2;
+
+ // config for setForceUse
+ public static final int FORCE_NONE = 0;
+ public static final int FORCE_SPEAKER = 1;
+ public static final int FORCE_HEADPHONES = 2;
+ public static final int FORCE_BT_SCO = 3;
+ public static final int FORCE_BT_A2DP = 4;
+ public static final int FORCE_WIRED_ACCESSORY = 5;
+ public static final int FORCE_DEFAULT = FORCE_NONE;
+
+    // usage for setForceUse
+ public static final int FOR_COMMUNICATION = 0;
+ public static final int FOR_MEDIA = 1;
+ public static final int FOR_RECORD = 2;
+
+ public static native int setDeviceConnectionState(int device, int state, String device_address);
+ public static native int getDeviceConnectionState(int device, String device_address);
+ public static native int setPhoneState(int state);
+ public static native int setRingerMode(int mode, int mask);
+ public static native int setForceUse(int usage, int config);
+ public static native int getForceUse(int usage);
+ public static native int initStreamVolume(int stream, int indexMin, int indexMax);
+ public static native int setStreamVolumeIndex(int stream, int index);
+ public static native int getStreamVolumeIndex(int stream);
}
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 5f1be9d..7fbe965 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -120,7 +120,7 @@ public class AudioTrack
public static final int ERROR_INVALID_OPERATION = -3;
private static final int ERROR_NATIVESETUP_AUDIOSYSTEM = -16;
- private static final int ERROR_NATIVESETUP_INVALIDCHANNELCOUNT = -17;
+ private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK = -17;
private static final int ERROR_NATIVESETUP_INVALIDFORMAT = -18;
private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE = -19;
private static final int ERROR_NATIVESETUP_NATIVEINITFAILED = -20;
@@ -198,7 +198,7 @@ public class AudioTrack
/**
* The current audio channel configuration.
*/
- private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
/**
* The encoding of the audio samples.
* @see AudioFormat#ENCODING_PCM_8BIT
@@ -235,8 +235,8 @@ public class AudioTrack
* @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but
* not limited to) 44100, 22050 and 11025.
* @param channelConfig describes the configuration of the audio channels.
- * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
- * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}
+ * See {@link AudioFormat#CHANNEL_OUT_MONO} and
+ * {@link AudioFormat#CHANNEL_OUT_STEREO}
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
@@ -298,7 +298,8 @@ public class AudioTrack
&& (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM)
&& (streamType != AudioManager.STREAM_VOICE_CALL)
&& (streamType != AudioManager.STREAM_NOTIFICATION)
- && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)) {
+ && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)
+ && (streamType != AudioManager.STREAM_DTMF)) {
throw (new IllegalArgumentException("Invalid stream type."));
} else {
mStreamType = streamType;
@@ -316,18 +317,18 @@ public class AudioTrack
//--------------
// channel config
switch (channelConfig) {
- case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT:
- case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+ case AudioFormat.CHANNEL_OUT_DEFAULT:
+ case AudioFormat.CHANNEL_OUT_MONO:
mChannelCount = 1;
- mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
break;
- case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+ case AudioFormat.CHANNEL_OUT_STEREO:
mChannelCount = 2;
- mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ mChannelConfiguration = AudioFormat.CHANNEL_OUT_STEREO;
break;
default:
mChannelCount = 0;
- mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_INVALID;
+ mChannelConfiguration = AudioFormat.CHANNEL_INVALID;
throw(new IllegalArgumentException("Unsupported channel configuration."));
}
@@ -452,8 +453,8 @@ public class AudioTrack
/**
* Returns the configured channel configuration.
- * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO}
- * and {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}.
+ * See {@link AudioFormat#CHANNEL_OUT_MONO}
+ * and {@link AudioFormat#CHANNEL_OUT_STEREO}.
*/
public int getChannelConfiguration() {
return mChannelConfiguration;
@@ -531,8 +532,8 @@ public class AudioTrack
* the expected frequency at which the buffer will be refilled with additional data to play.
* @param sampleRateInHz the sample rate expressed in Hertz.
* @param channelConfig describes the configuration of the audio channels.
- * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and
- * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}
+ * See {@link AudioFormat#CHANNEL_OUT_MONO} and
+ * {@link AudioFormat#CHANNEL_OUT_STEREO}
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
@@ -544,10 +545,10 @@ public class AudioTrack
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
int channelCount = 0;
switch(channelConfig) {
- case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+ case AudioFormat.CHANNEL_OUT_MONO:
channelCount = 1;
break;
- case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+ case AudioFormat.CHANNEL_OUT_STEREO:
channelCount = 2;
break;
default:
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index 9a8264f..bb4252b 100644
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -29,9 +29,9 @@ interface IAudioService {
void setStreamVolume(int streamType, int index, int flags);
- void setStreamSolo(int streamType, boolean state, IBinder cb);
+ void setStreamSolo(int streamType, boolean state, IBinder cb);
- void setStreamMute(int streamType, boolean state, IBinder cb);
+ void setStreamMute(int streamType, boolean state, IBinder cb);
int getStreamVolume(int streamType);
@@ -46,23 +46,11 @@ interface IAudioService {
int getVibrateSetting(int vibrateType);
boolean shouldVibrate(int vibrateType);
-
- void setMicrophoneMute(boolean on);
-
- boolean isMicrophoneMute();
void setMode(int mode);
int getMode();
- void setRouting(int mode, int routes, int mask);
-
- int getRouting(int mode);
-
- boolean isMusicActive();
-
- void setParameter(String key, String value);
-
oneway void playSoundEffect(int effectType);
oneway void playSoundEffectVolume(int effectType, float volume);
diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java
index 4fb0ead..2263605 100644
--- a/media/java/android/media/JetPlayer.java
+++ b/media/java/android/media/JetPlayer.java
@@ -89,7 +89,7 @@ public class JetPlayer
// Jet rendering audio parameters
private static final int JET_OUTPUT_RATE = 22050; // _SAMPLE_RATE_22050 in Android.mk
private static final int JET_OUTPUT_CHANNEL_CONFIG =
- AudioFormat.CHANNEL_CONFIGURATION_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk
+ AudioFormat.CHANNEL_OUT_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk
//--------------------------------------------
diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java
index 8be11df..03ffc67 100644
--- a/media/java/android/media/MediaFile.java
+++ b/media/java/android/media/MediaFile.java
@@ -41,8 +41,9 @@ public class MediaFile {
public static final int FILE_TYPE_AWB = 5;
public static final int FILE_TYPE_WMA = 6;
public static final int FILE_TYPE_OGG = 7;
+ public static final int FILE_TYPE_AAC = 8;
private static final int FIRST_AUDIO_FILE_TYPE = FILE_TYPE_MP3;
- private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_OGG;
+ private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_AAC;
// MIDI file types
public static final int FILE_TYPE_MID = 11;
@@ -57,8 +58,9 @@ public class MediaFile {
public static final int FILE_TYPE_3GPP = 23;
public static final int FILE_TYPE_3GPP2 = 24;
public static final int FILE_TYPE_WMV = 25;
+ public static final int FILE_TYPE_ASF = 26;
private static final int FIRST_VIDEO_FILE_TYPE = FILE_TYPE_MP4;
- private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_WMV;
+ private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_ASF;
// Image file types
public static final int FILE_TYPE_JPEG = 31;
@@ -104,6 +106,7 @@ public class MediaFile {
addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma");
addFileType("OGG", FILE_TYPE_OGG, "application/ogg");
addFileType("OGA", FILE_TYPE_OGG, "application/ogg");
+ addFileType("AAC", FILE_TYPE_AAC, "audio/aac");
addFileType("MID", FILE_TYPE_MID, "audio/midi");
addFileType("MIDI", FILE_TYPE_MID, "audio/midi");
@@ -121,6 +124,7 @@ public class MediaFile {
addFileType("3G2", FILE_TYPE_3GPP2, "video/3gpp2");
addFileType("3GPP2", FILE_TYPE_3GPP2, "video/3gpp2");
addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv");
+ addFileType("ASF", FILE_TYPE_ASF, "video/x-ms-asf");
addFileType("JPG", FILE_TYPE_JPEG, "image/jpeg");
addFileType("JPEG", FILE_TYPE_JPEG, "image/jpeg");
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index 3b46d69..a23f535 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -23,6 +23,7 @@ import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
+import android.os.Parcel;
import android.os.ParcelFileDescriptor;
import android.os.PowerManager;
import android.util.Log;
@@ -33,7 +34,7 @@ import android.media.AudioManager;
import java.io.FileDescriptor;
import java.io.IOException;
-
+import java.util.Set;
import java.lang.ref.WeakReference;
/**
@@ -430,11 +431,48 @@ import java.lang.ref.WeakReference;
*/
public class MediaPlayer
{
+ /**
+ Constant to retrieve only the new metadata since the last
+ call.
+ // FIXME: unhide.
+ // FIXME: add link to getMetadata(boolean, boolean)
+ {@hide}
+ */
+ public static final boolean METADATA_UPDATE_ONLY = true;
+
+ /**
+ Constant to retrieve all the metadata.
+ // FIXME: unhide.
+ // FIXME: add link to getMetadata(boolean, boolean)
+ {@hide}
+ */
+ public static final boolean METADATA_ALL = false;
+
+ /**
+ Constant to enable the metadata filter during retrieval.
+ // FIXME: unhide.
+ // FIXME: add link to getMetadata(boolean, boolean)
+ {@hide}
+ */
+ public static final boolean APPLY_METADATA_FILTER = true;
+
+ /**
+ Constant to disable the metadata filter during retrieval.
+ // FIXME: unhide.
+ // FIXME: add link to getMetadata(boolean, boolean)
+ {@hide}
+ */
+ public static final boolean BYPASS_METADATA_FILTER = false;
+
static {
System.loadLibrary("media_jni");
}
private final static String TAG = "MediaPlayer";
+ // Name of the remote interface for the media player. Must be kept
+ // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE
+ // macro invocation in IMediaPlayer.cpp
+ private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer";
private int mNativeContext; // accessed by native methods
private int mListenerContext; // accessed by native methods
@@ -475,6 +513,43 @@ public class MediaPlayer
private native void _setVideoSurface();
/**
+ * Create a request parcel which can be routed to the native media
+ * player using {@link #invoke(Parcel, Parcel)}. The Parcel
+ * returned has the proper InterfaceToken set. The caller should
+ * not overwrite that token, i.e it can only append data to the
+ * Parcel.
+ *
+ * @return A parcel suitable to hold a request for the native
+ * player.
+ */
+ public Parcel newRequest() {
+ Parcel parcel = Parcel.obtain();
+ parcel.writeInterfaceToken(IMEDIA_PLAYER);
+ return parcel;
+ }
+
+ /**
+ * Invoke a generic method on the native player using opaque
+ * parcels for the request and reply. Both payloads' format is a
+ * convention between the java caller and the native player.
+ * Must be called after setDataSource to make sure a native player
+ * exists.
+ *
+ * @param request Parcel with the data for the extension. The
+ * caller must use {@link #newRequest()} to get one.
+ *
+ * @param[out] reply Parcel with the data returned by the
+ * native player.
+ *
+ * @return The status code see utils/Errors.h
+ */
+ public int invoke(Parcel request, Parcel reply) {
+ int retcode = native_invoke(request, reply);
+ reply.setDataPosition(0);
+ return retcode;
+ }
+
+ /**
* Sets the SurfaceHolder to use for displaying the video portion of the media.
* This call is optional. Not calling it when playing back a video will
* result in only the audio track being played.
@@ -838,6 +913,89 @@ public class MediaPlayer
public native int getDuration();
/**
+ * Gets the media metadata.
+ *
+ * @param update_only controls whether the full set of available
+ * metadata is returned or just the set that changed since the
+ * last call. See {@see #METADATA_UPDATE_ONLY} and {@see
+ * #METADATA_ALL}.
+ *
+ * @param apply_filter if true only metadata that matches the
+ * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see
+ * #BYPASS_METADATA_FILTER}.
+ *
+     * @return The metadata, possibly empty. null if an error occurred.
+ // FIXME: unhide.
+ * {@hide}
+ */
+ public Metadata getMetadata(final boolean update_only,
+ final boolean apply_filter) {
+ Parcel reply = Parcel.obtain();
+ Metadata data = new Metadata();
+
+ if (!native_getMetadata(update_only, apply_filter, reply)) {
+ reply.recycle();
+ return null;
+ }
+
+ // Metadata takes over the parcel, don't recycle it unless
+ // there is an error.
+ if (!data.parse(reply)) {
+ reply.recycle();
+ return null;
+ }
+ return data;
+ }
+
+ /**
+ * Set a filter for the metadata update notification and update
+     * retrieval. The caller provides 2 sets of metadata keys, allowed
+ * and blocked. The blocked set always takes precedence over the
+ * allowed one.
+ * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as
+ * shorthands to allow/block all or no metadata.
+ *
+ * By default, there is no filter set.
+ *
+ * @param allow Is the set of metadata the client is interested
+ * in receiving new notifications for.
+ * @param block Is the set of metadata the client is not interested
+ * in receiving new notifications for.
+ * @return The call status code.
+ *
+ // FIXME: unhide.
+ * {@hide}
+ */
+ public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) {
+ // Do our serialization manually instead of calling
+ // Parcel.writeArray since the sets are made of the same type
+ // we avoid paying the price of calling writeValue (used by
+ // writeArray) which burns an extra int per element to encode
+ // the type.
+ Parcel request = newRequest();
+
+ // The parcel starts already with an interface token. There
+ // are 2 filters. Each one starts with a 4bytes number to
+ // store the len followed by a number of int (4 bytes as well)
+ // representing the metadata type.
+ int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size());
+
+ if (request.dataCapacity() < capacity) {
+ request.setDataCapacity(capacity);
+ }
+
+ request.writeInt(allow.size());
+ for(Integer t: allow) {
+ request.writeInt(t);
+ }
+ request.writeInt(block.size());
+ for(Integer t: block) {
+ request.writeInt(t);
+ }
+ return native_setMetadataFilter(request);
+ }
+
+ /**
* Releases resources associated with this MediaPlayer object.
* It is considered good practice to call this method when you're
* done using the MediaPlayer.
@@ -915,8 +1073,45 @@ public class MediaPlayer
*/
public native Bitmap getFrameAt(int msec) throws IllegalStateException;
+ /**
+     * @param request Parcel destined for the media player. The
+ * Interface token must be set to the IMediaPlayer
+ * one to be routed correctly through the system.
+ * @param reply[out] Parcel that will contain the reply.
+ * @return The status code.
+ */
+ private native final int native_invoke(Parcel request, Parcel reply);
+
+
+ /**
+ * @param update_only If true fetch only the set of metadata that have
+ * changed since the last invocation of getMetadata.
+ * The set is built using the unfiltered
+ * notifications the native player sent to the
+ * MediaPlayerService during that period of
+     *                    time. If false, all the metadata is considered.
+ * @param apply_filter If true, once the metadata set has been built based on
+ * the value update_only, the current filter is applied.
+ * @param reply[out] On return contains the serialized
+ * metadata. Valid only if the call was successful.
+ * @return The status code.
+ */
+ private native final boolean native_getMetadata(boolean update_only,
+ boolean apply_filter,
+ Parcel reply);
+
+ /**
+ * @param request Parcel with the 2 serialized lists of allowed
+ * metadata types followed by the one to be
+ * dropped. Each list starts with an integer
+ * indicating the number of metadata type elements.
+ * @return The status code.
+ */
+ private native final int native_setMetadataFilter(Parcel request);
+
private native final void native_setup(Object mediaplayer_this);
private native final void native_finalize();
+
@Override
protected void finalize() { native_finalize(); }
@@ -1254,6 +1449,11 @@ public class MediaPlayer
*/
public static final int MEDIA_INFO_NOT_SEEKABLE = 801;
+ /** A new set of metadata is available.
+ * @see android.media.MediaPlayer.OnInfoListener
+ */
+ public static final int MEDIA_INFO_METADATA_UPDATE = 802;
+
/**
* Interface definition of a callback to be invoked to communicate some
* info and/or warning about the media or its playback.
@@ -1270,6 +1470,7 @@ public class MediaPlayer
* <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING}
* <li>{@link #MEDIA_INFO_BAD_INTERLEAVING}
* <li>{@link #MEDIA_INFO_NOT_SEEKABLE}
+ * <li>{@link #MEDIA_INFO_METADATA_UPDATE}
* </ul>
* @param extra an extra code, specific to the info. Typically
* implementation dependant.
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index 95f3680..d5801f7 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -307,10 +307,14 @@ public class MediaScanner
private boolean mDefaultRingtoneSet;
/** Whether the scanner has set a default sound for the notification ringtone. */
private boolean mDefaultNotificationSet;
+ /** Whether the scanner has set a default sound for the alarm ringtone. */
+ private boolean mDefaultAlarmSet;
/** The filename for the default sound for the ringer ringtone. */
private String mDefaultRingtoneFilename;
/** The filename for the default sound for the notification ringtone. */
private String mDefaultNotificationFilename;
+ /** The filename for the default sound for the alarm ringtone. */
+ private String mDefaultAlarmAlertFilename;
/**
* The prefix for system properties that define the default sound for
* ringtones. Concatenate the name of the setting from Settings
@@ -369,6 +373,8 @@ public class MediaScanner
+ Settings.System.RINGTONE);
mDefaultNotificationFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX
+ Settings.System.NOTIFICATION_SOUND);
+ mDefaultAlarmAlertFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX
+ + Settings.System.ALARM_ALERT);
}
private MyMediaScannerClient mClient = new MyMediaScannerClient();
@@ -759,6 +765,12 @@ public class MediaScanner
setSettingIfNotSet(Settings.System.RINGTONE, tableUri, rowId);
mDefaultRingtoneSet = true;
}
+ } else if (alarms && !mDefaultAlarmSet) {
+ if (TextUtils.isEmpty(mDefaultAlarmAlertFilename) ||
+ doesPathHaveFilename(entry.mPath, mDefaultAlarmAlertFilename)) {
+ setSettingIfNotSet(Settings.System.ALARM_ALERT, tableUri, rowId);
+ mDefaultAlarmSet = true;
+ }
}
return result;
diff --git a/media/java/android/media/Metadata.java b/media/java/android/media/Metadata.java
new file mode 100644
index 0000000..bd25da2
--- /dev/null
+++ b/media/java/android/media/Metadata.java
@@ -0,0 +1,418 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.Bitmap;
+import android.os.Parcel;
+import android.util.Log;
+
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Set;
+import java.util.TimeZone;
+
+
+/**
+ Class to hold the media's metadata. Metadata are used
+ for human consumption and can be embedded in the media (e.g
+ shoutcast) or available from an external source. The source can be
+ local (e.g thumbnail stored in the DB) or remote (e.g caption
+ server).
+
+ Metadata is like a Bundle. It is sparse and each key can occur at
+ most once. The key is an integer and the value is the actual metadata.
+
+ The caller is expected to know the type of the metadata and call
+ the right get* method to fetch its value.
+
+ // FIXME: unhide.
+ {@hide}
+ */
+public class Metadata
+{
+ // The metadata are keyed using integers rather than more heavy
+ // weight strings. We considered using Bundle to ship the metadata
+ // between the native layer and the java layer but dropped that
+ // option since keeping in sync a native implementation of Bundle
+ // and the java one would be too burdensome. Besides Bundle uses
+ // String for its keys.
+ // The key range [0 8192) is reserved for the system.
+ //
+ // We manually serialize the data in Parcels. For large memory
+ // blob (bitmaps, raw pictures) we use MemoryFile which allow the
+ // client to make the data purge-able once it is done with it.
+ //
+
+ public static final int ANY = 0; // Never used for metadata returned, only for filtering.
+ // Keep in sync with kAny in MediaPlayerService.cpp
+
+ // TODO: Should we use numbers compatible with the metadata retriever?
+ public static final int TITLE = 1; // String
+ public static final int COMMENT = 2; // String
+ public static final int COPYRIGHT = 3; // String
+ public static final int ALBUM = 4; // String
+ public static final int ARTIST = 5; // String
+ public static final int AUTHOR = 6; // String
+ public static final int COMPOSER = 7; // String
+ public static final int GENRE = 8; // String
+ public static final int DATE = 9; // Date
+ public static final int DURATION = 10; // Integer(millisec)
+ public static final int CD_TRACK_NUM = 11; // Integer 1-based
+ public static final int CD_TRACK_MAX = 12; // Integer
+ public static final int RATING = 13; // String
+ public static final int ALBUM_ART = 14; // byte[]
+ public static final int VIDEO_FRAME = 15; // Bitmap
+ public static final int CAPTION = 16; // TimedText
+
+ public static final int BIT_RATE = 17; // Integer, Aggregate rate of
+ // all the streams in bps.
+
+ public static final int AUDIO_BIT_RATE = 18; // Integer, bps
+ public static final int VIDEO_BIT_RATE = 19; // Integer, bps
+ public static final int AUDIO_SAMPLE_RATE = 20; // Integer, Hz
+ public static final int VIDEO_FRAME_RATE = 21; // Integer, Hz
+
+ // See RFC2046 and RFC4281.
+ public static final int MIME_TYPE = 22; // String
+ public static final int AUDIO_CODEC = 23; // String
+ public static final int VIDEO_CODEC = 24; // String
+
+ public static final int VIDEO_HEIGHT = 25; // Integer
+ public static final int VIDEO_WIDTH = 26; // Integer
+ public static final int NUM_TRACKS = 27; // Integer
+ public static final int DRM_CRIPPLED = 28; // Boolean
+
+ // Playback capabilities.
+ public static final int PAUSE_AVAILABLE = 29; // Boolean
+ public static final int SEEK_BACKWARD_AVAILABLE = 30; // Boolean
+ public static final int SEEK_FORWARD_AVAILABLE = 31; // Boolean
+
+ private static final int LAST_SYSTEM = 31;
+ private static final int FIRST_CUSTOM = 8192;
+
+ // Shorthands to set the MediaPlayer's metadata filter.
+ public static final Set<Integer> MATCH_NONE = Collections.EMPTY_SET;
+ public static final Set<Integer> MATCH_ALL = Collections.singleton(ANY);
+
+ public static final int STRING_VAL = 1;
+ public static final int INTEGER_VAL = 2;
+ public static final int BOOLEAN_VAL = 3;
+ public static final int LONG_VAL = 4;
+ public static final int DOUBLE_VAL = 5;
+ public static final int TIMED_TEXT_VAL = 6;
+ public static final int DATE_VAL = 7;
+ public static final int BYTE_ARRAY_VAL = 8;
 + // FIXME: missing a type for shared heap (MemoryFile).
 + // FIXME: missing a type for bitmaps.
+ private static final int LAST_TYPE = 8;
+
+ private static final String TAG = "media.Metadata";
+ private static final int kInt32Size = 4;
+ private static final int kMetaHeaderSize = 2 * kInt32Size; // size + marker
+ private static final int kRecordHeaderSize = 3 * kInt32Size; // size + id + type
+
+ private static final int kMetaMarker = 0x4d455441; // 'M' 'E' 'T' 'A'
+
+ // After a successful parsing, set the parcel with the serialized metadata.
+ private Parcel mParcel;
+
+ // Map to associate a Metadata key (e.g TITLE) with the offset of
+ // the record's payload in the parcel.
+ // Used to look up if a key was present too.
+ // Key: Metadata ID
+ // Value: Offset of the metadata type field in the record.
+ private final HashMap<Integer, Integer> mKeyToPosMap =
+ new HashMap<Integer, Integer>();
+
+ /**
+ * Helper class to hold a triple (time, duration, text). Can be used to
+ * implement caption.
+ */
+ public class TimedText {
+ private Date mTime;
+ private int mDuration; // millisec
+ private String mText;
+
+ public TimedText(Date time, int duration, String text) {
+ mTime = time;
+ mDuration = duration;
+ mText = text;
+ }
+
+ public String toString() {
+ StringBuilder res = new StringBuilder(80);
+ res.append(mTime).append("-").append(mDuration)
+ .append(":").append(mText);
+ return res.toString();
+ }
+ }
+
+ public Metadata() { }
+
+ /**
+ * Go over all the records, collecting metadata keys and records'
+ * type field offset in the Parcel. These are stored in
 + * mKeyToPosMap for later retrieval.
+ * Format of a metadata record:
+ <pre>
+ 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | record size |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | metadata key | // TITLE
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | metadata type | // STRING_VAL
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | |
+ | .... metadata payload .... |
+ | |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ </pre>
+ * @param parcel With the serialized records.
+ * @param bytesLeft How many bytes in the parcel should be processed.
+ * @return false if an error occurred during parsing.
+ */
+ private boolean scanAllRecords(Parcel parcel, int bytesLeft) {
+ int recCount = 0;
+ boolean error = false;
+
+ mKeyToPosMap.clear();
+ while (bytesLeft > kRecordHeaderSize) {
+ final int start = parcel.dataPosition();
+ // Check the size.
+ final int size = parcel.readInt();
+
+ if (size <= kRecordHeaderSize) { // at least 1 byte should be present.
+ Log.e(TAG, "Record is too short");
+ error = true;
+ break;
+ }
+
+ // Check the metadata key.
+ final int metadataId = parcel.readInt();
+ if (!checkMetadataId(metadataId)) {
+ error = true;
+ break;
+ }
+
+ // Store the record offset which points to the type
+ // field so we can later on read/unmarshall the record
+ // payload.
+ if (mKeyToPosMap.containsKey(metadataId)) {
+ Log.e(TAG, "Duplicate metadata ID found");
+ error = true;
+ break;
+ }
+
+ mKeyToPosMap.put(metadataId, parcel.dataPosition());
+
+ // Check the metadata type.
+ final int metadataType = parcel.readInt();
+ if (metadataType <= 0 || metadataType > LAST_TYPE) {
+ Log.e(TAG, "Invalid metadata type " + metadataType);
+ error = true;
+ break;
+ }
+
+ // Skip to the next one.
+ parcel.setDataPosition(start + size);
+ bytesLeft -= size;
+ ++recCount;
+ }
+
+ if (0 != bytesLeft || error) {
+ Log.e(TAG, "Ran out of data or error on record " + recCount);
+ mKeyToPosMap.clear();
+ return false;
+ } else {
+ return true;
+ }
+ }
+
+ /**
+ * Check a parcel containing metadata is well formed. The header
+ * is checked as well as the individual records format. However, the
+ * data inside the record is not checked because we do lazy access
+ * (we check/unmarshall only data the user asks for.)
+ *
+ * Format of a metadata parcel:
+ <pre>
+ 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | metadata total size |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | 'M' | 'E' | 'T' | 'A' |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | |
+ | .... metadata records .... |
+ | |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ </pre>
+ *
+ * @param parcel With the serialized data. Metadata keeps a
+ * reference on it to access it later on. The caller
+ * should not modify the parcel after this call (and
+ * not call recycle on it.)
+ * @return false if an error occurred.
+ */
+ public boolean parse(Parcel parcel) {
+ if (parcel.dataAvail() < kMetaHeaderSize) {
+ Log.e(TAG, "Not enough data " + parcel.dataAvail());
+ return false;
+ }
+
+ final int pin = parcel.dataPosition(); // to roll back in case of errors.
+ final int size = parcel.readInt();
+
+ // The extra kInt32Size below is to account for the int32 'size' just read.
+ if (parcel.dataAvail() + kInt32Size < size || size < kMetaHeaderSize) {
+ Log.e(TAG, "Bad size " + size + " avail " + parcel.dataAvail() + " position " + pin);
+ parcel.setDataPosition(pin);
+ return false;
+ }
+
+ // Checks if the 'M' 'E' 'T' 'A' marker is present.
+ final int kShouldBeMetaMarker = parcel.readInt();
+ if (kShouldBeMetaMarker != kMetaMarker ) {
+ Log.e(TAG, "Marker missing " + Integer.toHexString(kShouldBeMetaMarker));
+ parcel.setDataPosition(pin);
+ return false;
+ }
+
+ // Scan the records to collect metadata ids and offsets.
+ if (!scanAllRecords(parcel, size - kMetaHeaderSize)) {
+ parcel.setDataPosition(pin);
+ return false;
+ }
+ mParcel = parcel;
+ return true;
+ }
+
+ /**
+ * @return The set of metadata ID found.
+ */
+ public Set<Integer> keySet() {
+ return mKeyToPosMap.keySet();
+ }
+
+ /**
+ * @return true if a value is present for the given key.
+ */
+ public boolean has(final int metadataId) {
+ if (!checkMetadataId(metadataId)) {
+ throw new IllegalArgumentException("Invalid key: " + metadataId);
+ }
+ return mKeyToPosMap.containsKey(metadataId);
+ }
+
+ // Accessors.
+ // Caller must make sure the key is present using the {@code has}
+ // method otherwise a RuntimeException will occur.
+
+ public String getString(final int key) {
+ checkType(key, STRING_VAL);
+ return mParcel.readString();
+ }
+
+ public int getInt(final int key) {
+ checkType(key, INTEGER_VAL);
+ return mParcel.readInt();
+ }
+
+ public boolean getBoolean(final int key) {
+ checkType(key, BOOLEAN_VAL);
+ return mParcel.readInt() == 1;
+ }
+
+ public long getLong(final int key) {
+ checkType(key, LONG_VAL);
+ return mParcel.readLong();
+ }
+
+ public double getDouble(final int key) {
+ checkType(key, DOUBLE_VAL);
+ return mParcel.readDouble();
+ }
+
+ public byte[] getByteArray(final int key) {
+ checkType(key, BYTE_ARRAY_VAL);
+ return mParcel.createByteArray();
+ }
+
+ public Date getDate(final int key) {
+ checkType(key, DATE_VAL);
+ final long timeSinceEpoch = mParcel.readLong();
+ final String timeZone = mParcel.readString();
+
+ if (timeZone.length() == 0) {
+ return new Date(timeSinceEpoch);
+ } else {
+ TimeZone tz = TimeZone.getTimeZone(timeZone);
+ Calendar cal = Calendar.getInstance(tz);
+
+ cal.setTimeInMillis(timeSinceEpoch);
+ return cal.getTime();
+ }
+ }
+
+ public TimedText getTimedText(final int key) {
+ checkType(key, TIMED_TEXT_VAL);
+ final Date startTime = new Date(mParcel.readLong()); // epoch
+ final int duration = mParcel.readInt(); // millisec
+
+ return new TimedText(startTime,
+ duration,
+ mParcel.readString());
+ }
+
+ // @return the last available system metadata id. Ids are
+ // 1-indexed.
+ public static int lastSytemId() { return LAST_SYSTEM; }
+
 + // @return the first available custom metadata id.
+ public static int firstCustomId() { return FIRST_CUSTOM; }
+
+ // @return the last value of known type. Types are 1-indexed.
+ public static int lastType() { return LAST_TYPE; }
+
+ // Check val is either a system id or a custom one.
+ // @param val Metadata key to test.
+ // @return true if it is in a valid range.
+ private boolean checkMetadataId(final int val) {
+ if (val <= ANY || (LAST_SYSTEM < val && val < FIRST_CUSTOM)) {
+ Log.e(TAG, "Invalid metadata ID " + val);
+ return false;
+ }
+ return true;
+ }
+
+ // Check the type of the data match what is expected.
+ private void checkType(final int key, final int expectedType) {
+ final int pos = mKeyToPosMap.get(key);
+
+ mParcel.setDataPosition(pos);
+
+ final int type = mParcel.readInt();
+ if (type != expectedType) {
+ throw new IllegalStateException("Wrong type " + expectedType + " but got " + type);
+ }
+ }
+}
diff --git a/media/java/android/media/RingtoneManager.java b/media/java/android/media/RingtoneManager.java
index 42edae6..44026e4 100644
--- a/media/java/android/media/RingtoneManager.java
+++ b/media/java/android/media/RingtoneManager.java
@@ -122,8 +122,9 @@ public class RingtoneManager {
* current ringtone, which will be used to show a checkmark next to the item
* for this {@link Uri}. If showing an item for "Default" (@see
* {@link #EXTRA_RINGTONE_SHOW_DEFAULT}), this can also be one of
- * {@link System#DEFAULT_RINGTONE_URI} or
- * {@link System#DEFAULT_NOTIFICATION_URI} to have the "Default" item
+ * {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" item
* checked.
*
* @see #ACTION_RINGTONE_PICKER
@@ -134,8 +135,9 @@ public class RingtoneManager {
/**
* Given to the ringtone picker as a {@link Uri}. The {@link Uri} of the
* ringtone to play when the user attempts to preview the "Default"
- * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI} or
- * {@link System#DEFAULT_NOTIFICATION_URI} to have the "Default" point to
+ * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" point to
* the current sound for the given default sound type. If you are showing a
* ringtone picker for some other type of sound, you are free to provide any
* {@link Uri} here.
@@ -163,8 +165,9 @@ public class RingtoneManager {
* <p>
* It will be one of:
* <li> the picked ringtone,
- * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI} or
- * {@link System#DEFAULT_NOTIFICATION_URI} if the default was chosen,
+ * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI} if the default was chosen,
* <li> null if the "Silent" item was picked.
*
* @see #ACTION_RINGTONE_PICKER
@@ -627,7 +630,8 @@ public class RingtoneManager {
*
* @param context A context used for querying.
* @param type The type whose default sound should be returned. One of
- * {@link #TYPE_RINGTONE} or {@link #TYPE_NOTIFICATION}.
+ * {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or
+ * {@link #TYPE_ALARM}.
* @return A {@link Uri} pointing to the default sound for the sound type.
* @see #setActualDefaultRingtoneUri(Context, int, Uri)
*/
@@ -643,7 +647,8 @@ public class RingtoneManager {
*
* @param context A context used for querying.
* @param type The type whose default sound should be set. One of
- * {@link #TYPE_RINGTONE} or {@link #TYPE_NOTIFICATION}.
+ * {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or
+ * {@link #TYPE_ALARM}.
* @param ringtoneUri A {@link Uri} pointing to the default sound to set.
* @see #getActualDefaultRingtoneUri(Context, int)
*/
@@ -658,6 +663,8 @@ public class RingtoneManager {
return Settings.System.RINGTONE;
} else if ((type & TYPE_NOTIFICATION) != 0) {
return Settings.System.NOTIFICATION_SOUND;
+ } else if ((type & TYPE_ALARM) != 0) {
+ return Settings.System.ALARM_ALERT;
} else {
return null;
}
@@ -677,8 +684,9 @@ public class RingtoneManager {
* Returns the type of a default {@link Uri}.
*
* @param defaultRingtoneUri The default {@link Uri}. For example,
- * {@link System#DEFAULT_RINGTONE_URI} or
- * {@link System#DEFAULT_NOTIFICATION_URI}.
+ * {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI}.
* @return The type of the defaultRingtoneUri, or -1.
*/
public static int getDefaultType(Uri defaultRingtoneUri) {
@@ -688,6 +696,8 @@ public class RingtoneManager {
return TYPE_RINGTONE;
} else if (defaultRingtoneUri.equals(Settings.System.DEFAULT_NOTIFICATION_URI)) {
return TYPE_NOTIFICATION;
+ } else if (defaultRingtoneUri.equals(Settings.System.DEFAULT_ALARM_ALERT_URI)) {
+ return TYPE_ALARM;
} else {
return -1;
}
@@ -707,6 +717,8 @@ public class RingtoneManager {
return Settings.System.DEFAULT_RINGTONE_URI;
} else if ((type & TYPE_NOTIFICATION) != 0) {
return Settings.System.DEFAULT_NOTIFICATION_URI;
+ } else if ((type & TYPE_ALARM) != 0) {
+ return Settings.System.DEFAULT_ALARM_ALERT_URI;
} else {
return null;
}
diff --git a/media/java/android/media/ToneGenerator.java b/media/java/android/media/ToneGenerator.java
index e5ee9a3..c60a1ac 100644
--- a/media/java/android/media/ToneGenerator.java
+++ b/media/java/android/media/ToneGenerator.java
@@ -724,9 +724,9 @@ public class ToneGenerator
public static final int TONE_CDMA_SIGNAL_OFF = 98;
/** Maximum volume, for use with {@link #ToneGenerator(int,int)} */
- public static final int MAX_VOLUME = AudioSystem.MAX_VOLUME;
+ public static final int MAX_VOLUME = 100;
/** Minimum volume setting, for use with {@link #ToneGenerator(int,int)} */
- public static final int MIN_VOLUME = AudioSystem.MIN_VOLUME;
+ public static final int MIN_VOLUME = 0;
/**
diff --git a/media/jni/Android.mk b/media/jni/Android.mk
index 8ee0cbd..1f37111 100644
--- a/media/jni/Android.mk
+++ b/media/jni/Android.mk
@@ -18,14 +18,16 @@ LOCAL_SHARED_LIBRARIES := \
libnativehelper \
libcutils \
libutils \
+ libbinder \
libmedia \
- libsgl \
+ libskia \
libui
LOCAL_STATIC_LIBRARIES :=
LOCAL_C_INCLUDES += \
external/tremor/Tremor \
+ frameworks/base/core/jni \
$(PV_INCLUDES) \
$(JNI_H_INCLUDE) \
$(call include-path-for, corecg graphics)
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 6317fe2..d26d039 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -20,6 +20,7 @@
#include "utils/Log.h"
#include <media/mediaplayer.h>
+#include <media/MediaPlayerInterface.h>
#include <stdio.h>
#include <assert.h>
#include <limits.h>
@@ -30,6 +31,8 @@
#include "JNIHelp.h"
#include "android_runtime/AndroidRuntime.h"
#include "utils/Errors.h" // for status_t
+#include "android_util_Binder.h"
+#include <binder/Parcel.h>
// ----------------------------------------------------------------------------
@@ -98,10 +101,9 @@ void JNIMediaPlayerListener::notify(int msg, int ext1, int ext2)
// ----------------------------------------------------------------------------
-static sp<Surface> get_surface(JNIEnv* env, jobject clazz)
+static Surface* get_surface(JNIEnv* env, jobject clazz)
{
- Surface* const p = (Surface*)env->GetIntField(clazz, fields.surface_native);
- return sp<Surface>(p);
+ return (Surface*)env->GetIntField(clazz, fields.surface_native);
}
static sp<MediaPlayer> getMediaPlayer(JNIEnv* env, jobject thiz)
@@ -202,7 +204,7 @@ static void setVideoSurface(const sp<MediaPlayer>& mp, JNIEnv *env, jobject thiz
{
jobject surface = env->GetObjectField(thiz, fields.surface);
if (surface != NULL) {
- const sp<Surface>& native_surface = get_surface(env, surface);
+ const sp<Surface> native_surface = get_surface(env, surface);
LOGV("prepare: surface=%p (id=%d)",
native_surface.get(), native_surface->ID());
mp->setVideoSurface(native_surface);
@@ -242,7 +244,7 @@ android_media_MediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
}
jobject surface = env->GetObjectField(thiz, fields.surface);
if (surface != NULL) {
- const sp<Surface>& native_surface = get_surface(env, surface);
+ const sp<Surface> native_surface = get_surface(env, surface);
LOGV("prepareAsync: surface=%p (id=%d)",
native_surface.get(), native_surface->ID());
mp->setVideoSurface(native_surface);
@@ -442,6 +444,74 @@ android_media_MediaPlayer_getFrameAt(JNIEnv *env, jobject thiz, jint msec)
return NULL;
}
+
+// Sends the request and reply parcels to the media player via the
+// binder interface.
+static jint
+android_media_MediaPlayer_invoke(JNIEnv *env, jobject thiz,
+ jobject java_request, jobject java_reply)
+{
+ sp<MediaPlayer> media_player = getMediaPlayer(env, thiz);
+ if (media_player == NULL ) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return UNKNOWN_ERROR;
+ }
+
+
+ Parcel *request = parcelForJavaObject(env, java_request);
+ Parcel *reply = parcelForJavaObject(env, java_reply);
+
+ // Don't use process_media_player_call which use the async loop to
+ // report errors, instead returns the status.
+ return media_player->invoke(*request, reply);
+}
+
+// Sends the new filter to the client.
+static jint
+android_media_MediaPlayer_setMetadataFilter(JNIEnv *env, jobject thiz, jobject request)
+{
+ sp<MediaPlayer> media_player = getMediaPlayer(env, thiz);
+ if (media_player == NULL ) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return UNKNOWN_ERROR;
+ }
+
+ Parcel *filter = parcelForJavaObject(env, request);
+
+ if (filter == NULL ) {
+ jniThrowException(env, "java/lang/RuntimeException", "Filter is null");
+ return UNKNOWN_ERROR;
+ }
+
+ return media_player->setMetadataFilter(*filter);
+}
+
+static jboolean
+android_media_MediaPlayer_getMetadata(JNIEnv *env, jobject thiz, jboolean update_only,
+ jboolean apply_filter, jobject reply)
+{
+ sp<MediaPlayer> media_player = getMediaPlayer(env, thiz);
+ if (media_player == NULL ) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return false;
+ }
+
+ Parcel *metadata = parcelForJavaObject(env, reply);
+
+ if (metadata == NULL ) {
+ jniThrowException(env, "java/lang/RuntimeException", "Reply parcel is null");
+ return false;
+ }
+
+ metadata->freeData();
+ // On return metadata is positioned at the beginning of the
+ // metadata. Note however that the parcel actually starts with the
+ // return code so you should not rewind the parcel using
+ // setDataPosition(0).
+ return media_player->getMetadata(update_only, apply_filter, metadata) == OK;
+}
+
+
static void
android_media_MediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
@@ -503,6 +573,9 @@ static JNINativeMethod gMethods[] = {
{"isLooping", "()Z", (void *)android_media_MediaPlayer_isLooping},
{"setVolume", "(FF)V", (void *)android_media_MediaPlayer_setVolume},
{"getFrameAt", "(I)Landroid/graphics/Bitmap;", (void *)android_media_MediaPlayer_getFrameAt},
+ {"native_invoke", "(Landroid/os/Parcel;Landroid/os/Parcel;)I",(void *)android_media_MediaPlayer_invoke},
+ {"native_setMetadataFilter", "(Landroid/os/Parcel;)I", (void *)android_media_MediaPlayer_setMetadataFilter},
+ {"native_getMetadata", "(ZZLandroid/os/Parcel;)Z", (void *)android_media_MediaPlayer_getMetadata},
{"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaPlayer_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaPlayer_native_finalize},
};
diff --git a/media/jni/soundpool/Android.mk b/media/jni/soundpool/Android.mk
index 374ddeb..9ff2e24 100644
--- a/media/jni/soundpool/Android.mk
+++ b/media/jni/soundpool/Android.mk
@@ -9,6 +9,7 @@ LOCAL_SRC_FILES:= \
LOCAL_SHARED_LIBRARIES := \
libcutils \
libutils \
+ libbinder \
libandroid_runtime \
libnativehelper \
libmedia
diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp
index 1fdecdd..0d07abe 100644
--- a/media/jni/soundpool/SoundPool.cpp
+++ b/media/jni/soundpool/SoundPool.cpp
@@ -527,13 +527,14 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
// wrong audio audio buffer size (mAudioBufferSize)
unsigned long toggle = mToggle ^ 1;
void *userData = (void *)((unsigned long)this | toggle);
+ uint32_t channels = (numChannels == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO;
#ifdef USE_SHARED_MEM_BUFFER
newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- numChannels, sample->getIMemory(), 0, callback, userData);
+ channels, sample->getIMemory(), 0, callback, userData);
#else
newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- numChannels, frameCount, 0, callback, userData, bufferFrames);
+ channels, frameCount, 0, callback, userData, bufferFrames);
#endif
if (newTrack->initCheck() != NO_ERROR) {
LOGE("Error creating AudioTrack");
diff --git a/media/libdrm/mobile2/include/rights/RoManager.h b/media/libdrm/mobile2/include/rights/RoManager.h
index cf398b3..71e9eef 100644
--- a/media/libdrm/mobile2/include/rights/RoManager.h
+++ b/media/libdrm/mobile2/include/rights/RoManager.h
@@ -64,12 +64,6 @@ public:
vector<Ro*> getAllRo();
/**
- * Get the private key of the device.
- * @return the private key.
- */
- const string& getDevicePrivateKey() const;
-
- /**
* Get ro which contained rights of specific content.
* @param contentID the specific id of content.
* @return NULL if not fount otherwise the related ro.
diff --git a/media/libdrm/mobile2/src/rights/RoManager.cpp b/media/libdrm/mobile2/src/rights/RoManager.cpp
index 848c2ba..a115d21 100644
--- a/media/libdrm/mobile2/src/rights/RoManager.cpp
+++ b/media/libdrm/mobile2/src/rights/RoManager.cpp
@@ -121,9 +121,3 @@ bool RoManager::checkRoInCache(const string& roID)
return true;
}
-/** see RoManager.h */
-const string& RoManager::getDevicePrivateKey() const
-{
- string pk;
- return pk;
-}
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 8020da2..9d442c3 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -2,31 +2,34 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- AudioTrack.cpp \
- IAudioFlinger.cpp \
- IAudioFlingerClient.cpp \
- IAudioTrack.cpp \
- IAudioRecord.cpp \
- AudioRecord.cpp \
- AudioSystem.cpp \
- mediaplayer.cpp \
- IMediaPlayerService.cpp \
- IMediaPlayerClient.cpp \
- IMediaPlayer.cpp \
- IMediaRecorder.cpp \
- mediarecorder.cpp \
- IMediaMetadataRetriever.cpp \
- mediametadataretriever.cpp \
- ToneGenerator.cpp \
- JetPlayer.cpp
+ AudioTrack.cpp \
+ IAudioFlinger.cpp \
+ IAudioFlingerClient.cpp \
+ IAudioTrack.cpp \
+ IAudioRecord.cpp \
+ AudioRecord.cpp \
+ AudioSystem.cpp \
+ mediaplayer.cpp \
+ IMediaPlayerService.cpp \
+ IMediaPlayerClient.cpp \
+ IMediaPlayer.cpp \
+ IMediaRecorder.cpp \
+ Metadata.cpp \
+ mediarecorder.cpp \
+ IMediaMetadataRetriever.cpp \
+ mediametadataretriever.cpp \
+ ToneGenerator.cpp \
+ JetPlayer.cpp \
+ IOMX.cpp \
+ IAudioPolicyService.cpp
LOCAL_SHARED_LIBRARIES := \
- libui libcutils libutils libsonivox
+ libui libcutils libutils libbinder libsonivox
LOCAL_MODULE:= libmedia
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
+LOCAL_LDLIBS += -ldl -lpthread
endif
ifneq ($(TARGET_SIMULATOR),true)
@@ -34,6 +37,7 @@ LOCAL_SHARED_LIBRARIES += libdl
endif
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg)
+ $(call include-path-for, graphics corecg) \
+ $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index e56efbb..5e35564 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -28,12 +28,13 @@
#include <media/AudioSystem.h>
#include <media/AudioRecord.h>
+#include <media/mediarecorder.h>
-#include <utils/IServiceManager.h>
+#include <binder/IServiceManager.h>
#include <utils/Log.h>
-#include <utils/MemoryDealer.h>
-#include <utils/Parcel.h>
-#include <utils/IPCThreadState.h>
+#include <binder/MemoryDealer.h>
+#include <binder/Parcel.h>
+#include <binder/IPCThreadState.h>
#include <utils/Timers.h>
#include <cutils/atomic.h>
@@ -45,7 +46,7 @@ namespace android {
// ---------------------------------------------------------------------------
AudioRecord::AudioRecord()
- : mStatus(NO_INIT)
+ : mStatus(NO_INIT), mInput(0)
{
}
@@ -53,15 +54,15 @@ AudioRecord::AudioRecord(
int inputSource,
uint32_t sampleRate,
int format,
- int channelCount,
+ uint32_t channels,
int frameCount,
uint32_t flags,
callback_t cbf,
void* user,
int notificationFrames)
- : mStatus(NO_INIT)
+ : mStatus(NO_INIT), mInput(0)
{
- mStatus = set(inputSource, sampleRate, format, channelCount,
+ mStatus = set(inputSource, sampleRate, format, channels,
frameCount, flags, cbf, user, notificationFrames);
}
@@ -78,6 +79,7 @@ AudioRecord::~AudioRecord()
}
mAudioRecord.clear();
IPCThreadState::self()->flushCommands();
+ AudioSystem::releaseInput(mInput);
}
}
@@ -85,7 +87,7 @@ status_t AudioRecord::set(
int inputSource,
uint32_t sampleRate,
int format,
- int channelCount,
+ uint32_t channels,
int frameCount,
uint32_t flags,
callback_t cbf,
@@ -94,7 +96,7 @@ status_t AudioRecord::set(
bool threadCanCallJava)
{
- LOGV("set(): sampleRate %d, channelCount %d, frameCount %d",sampleRate, channelCount, frameCount);
+ LOGV("set(): sampleRate %d, channels %d, frameCount %d",sampleRate, channels, frameCount);
if (mAudioRecord != 0) {
return INVALID_OPERATION;
}
@@ -104,8 +106,8 @@ status_t AudioRecord::set(
return NO_INIT;
}
- if (inputSource == DEFAULT_INPUT) {
- inputSource = MIC_INPUT;
+ if (inputSource == AUDIO_SOURCE_DEFAULT) {
+ inputSource = AUDIO_SOURCE_MIC;
}
if (sampleRate == 0) {
@@ -115,15 +117,21 @@ status_t AudioRecord::set(
if (format == 0) {
format = AudioSystem::PCM_16_BIT;
}
- if (channelCount == 0) {
- channelCount = 1;
+ // validate parameters
+ if (!AudioSystem::isValidFormat(format)) {
+ LOGE("Invalid format");
+ return BAD_VALUE;
}
- // validate parameters
- if (format != AudioSystem::PCM_16_BIT) {
+ if (!AudioSystem::isInputChannel(channels)) {
return BAD_VALUE;
}
- if (channelCount != 1 && channelCount != 2) {
+ int channelCount = AudioSystem::popCount(channels);
+
+ mInput = AudioSystem::getInput(inputSource,
+ sampleRate, format, channels, (AudioSystem::audio_in_acoustics)flags);
+ if (mInput == 0) {
+ LOGE("Could not get audio output for stream type %d", inputSource);
return BAD_VALUE;
}
@@ -132,14 +140,22 @@ status_t AudioRecord::set(
if (AudioSystem::getInputBufferSize(sampleRate, format, channelCount, &inputBuffSizeInBytes)
!= NO_ERROR) {
LOGE("AudioSystem could not query the input buffer size.");
- return NO_INIT;
+ return NO_INIT;
}
+
if (inputBuffSizeInBytes == 0) {
LOGE("Recording parameters are not supported: sampleRate %d, channelCount %d, format %d",
sampleRate, channelCount, format);
return BAD_VALUE;
}
+
int frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? 2 : 1);
+ if (AudioSystem::isLinearPCM(format)) {
+ frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? sizeof(int16_t) : sizeof(int8_t));
+ } else {
+ frameSizeInBytes = sizeof(int8_t);
+ }
+
// We use 2* size of input buffer for ping pong use of record buffer.
int minFrameCount = 2 * inputBuffSizeInBytes / frameSizeInBytes;
@@ -157,11 +173,11 @@ status_t AudioRecord::set(
// open record channel
status_t status;
- sp<IAudioRecord> record = audioFlinger->openRecord(getpid(), inputSource,
+ sp<IAudioRecord> record = audioFlinger->openRecord(getpid(), mInput,
sampleRate, format,
channelCount,
frameCount,
- ((uint16_t)flags) << 16,
+ ((uint16_t)flags) << 16,
&status);
if (record == 0) {
LOGE("AudioFlinger could not create record track, status: %d", status);
@@ -188,7 +204,7 @@ status_t AudioRecord::set(
mFormat = format;
// Update buffer size in case it has been limited by AudioFlinger during track creation
mFrameCount = mCblk->frameCount;
- mChannelCount = channelCount;
+ mChannelCount = (uint8_t)channelCount;
mActive = 0;
mCbf = cbf;
mNotificationFrames = notificationFrames;
@@ -234,7 +250,11 @@ uint32_t AudioRecord::frameCount() const
int AudioRecord::frameSize() const
{
- return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t));
+ if (AudioSystem::isLinearPCM(mFormat)) {
+ return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t));
+ } else {
+ return sizeof(uint8_t);
+ }
}
int AudioRecord::inputSource() const
@@ -262,15 +282,18 @@ status_t AudioRecord::start()
}
if (android_atomic_or(1, &mActive) == 0) {
- mNewPosition = mCblk->user + mUpdatePeriod;
- mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
- mCblk->waitTimeMs = 0;
- if (t != 0) {
- t->run("ClientRecordThread", THREAD_PRIORITY_AUDIO_CLIENT);
- } else {
- setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT);
+ ret = AudioSystem::startInput(mInput);
+ if (ret == NO_ERROR) {
+ mNewPosition = mCblk->user + mUpdatePeriod;
+ mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
+ mCblk->waitTimeMs = 0;
+ if (t != 0) {
+ t->run("ClientRecordThread", THREAD_PRIORITY_AUDIO_CLIENT);
+ } else {
+ setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT);
+ }
+ ret = mAudioRecord->start();
}
- ret = mAudioRecord->start();
}
if (t != 0) {
@@ -301,6 +324,7 @@ status_t AudioRecord::stop()
} else {
setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_NORMAL);
}
+ AudioSystem::stopInput(mInput);
}
if (t != 0) {
@@ -421,7 +445,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
"this shouldn't happen (user=%08x, server=%08x)", cblk->user, cblk->server);
cblk->waitTimeMs = 0;
-
+
if (framesReq > framesReady) {
framesReq = framesReady;
}
@@ -437,7 +461,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
audioBuffer->channelCount= mChannelCount;
audioBuffer->format = mFormat;
audioBuffer->frameCount = framesReq;
- audioBuffer->size = framesReq*mChannelCount*sizeof(int16_t);
+ audioBuffer->size = framesReq*cblk->frameSize;
audioBuffer->raw = (int8_t*)cblk->buffer(u);
active = mActive;
return active ? status_t(NO_ERROR) : status_t(STOPPED);
@@ -468,7 +492,7 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
do {
- audioBuffer.frameCount = userSize/mChannelCount/sizeof(int16_t);
+ audioBuffer.frameCount = userSize/frameSize();
// Calling obtainBuffer() with a negative wait count causes
// an (almost) infinite wait time.
@@ -519,8 +543,8 @@ bool AudioRecord::processAudioBuffer(const sp<ClientRecordThread>& thread)
do {
audioBuffer.frameCount = frames;
- // Calling obtainBuffer() with a wait count of 1
- // limits wait time to WAIT_PERIOD_MS. This prevents from being
+ // Calling obtainBuffer() with a wait count of 1
+ // limits wait time to WAIT_PERIOD_MS. This prevents from being
// stuck here not being able to handle timed events (position, markers).
status_t err = obtainBuffer(&audioBuffer, 1);
if (err < NO_ERROR) {
@@ -548,14 +572,14 @@ bool AudioRecord::processAudioBuffer(const sp<ClientRecordThread>& thread)
if (readSize > reqSize) readSize = reqSize;
audioBuffer.size = readSize;
- audioBuffer.frameCount = readSize/mChannelCount/sizeof(int16_t);
+ audioBuffer.frameCount = readSize/frameSize();
frames -= audioBuffer.frameCount;
releaseBuffer(&audioBuffer);
} while (frames);
-
+
// Manage overrun callback
if (mActive && (mCblk->framesAvailable_l() == 0)) {
LOGV("Overrun user: %x, server: %x, flowControlFlag %d", mCblk->user, mCblk->server, mCblk->flowControlFlag);
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index a21a7a4..1fc1024 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -18,10 +18,20 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
-#include <utils/IServiceManager.h>
+#include <binder/IServiceManager.h>
#include <media/AudioSystem.h>
+#include <media/IAudioPolicyService.h>
#include <math.h>
+// ----------------------------------------------------------------------------
+// the sim build doesn't have gettid
+
+#ifndef HAVE_GETTID
+# define gettid getpid
+#endif
+
+// ----------------------------------------------------------------------------
+
namespace android {
// client singleton for AudioFlinger binder interface
@@ -30,10 +40,9 @@ sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
// Cached values
-int AudioSystem::gOutSamplingRate[NUM_AUDIO_OUTPUT_TYPES];
-int AudioSystem::gOutFrameCount[NUM_AUDIO_OUTPUT_TYPES];
-uint32_t AudioSystem::gOutLatency[NUM_AUDIO_OUTPUT_TYPES];
-bool AudioSystem::gA2dpEnabled;
+DefaultKeyedVector<int, audio_io_handle_t> AudioSystem::gStreamOutputMap(0);
+DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(0);
+
// Cached values for recording queries
uint32_t AudioSystem::gPrevInSamplingRate = 16000;
int AudioSystem::gPrevInFormat = AudioSystem::PCM_16_BIT;
@@ -65,42 +74,10 @@ const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
binder->linkToDeath(gAudioFlingerClient);
gAudioFlinger = interface_cast<IAudioFlinger>(binder);
gAudioFlinger->registerClient(gAudioFlingerClient);
- // Cache frequently accessed parameters
- for (int output = 0; output < NUM_AUDIO_OUTPUT_TYPES; output++) {
- gOutFrameCount[output] = (int)gAudioFlinger->frameCount(output);
- gOutSamplingRate[output] = (int)gAudioFlinger->sampleRate(output);
- gOutLatency[output] = gAudioFlinger->latency(output);
- }
- gA2dpEnabled = gAudioFlinger->isA2dpEnabled();
}
LOGE_IF(gAudioFlinger==0, "no AudioFlinger!?");
- return gAudioFlinger;
-}
-// routing helper functions
-status_t AudioSystem::speakerphone(bool state) {
- uint32_t routes = state ? ROUTE_SPEAKER : ROUTE_EARPIECE;
- return setRouting(MODE_IN_CALL, routes, ROUTE_ALL);
-}
-
-status_t AudioSystem::isSpeakerphoneOn(bool* state) {
- uint32_t routes = 0;
- status_t s = getRouting(MODE_IN_CALL, &routes);
- *state = !!(routes & ROUTE_SPEAKER);
- return s;
-}
-
-status_t AudioSystem::bluetoothSco(bool state) {
- uint32_t mask = ROUTE_BLUETOOTH_SCO;
- uint32_t routes = state ? mask : ROUTE_EARPIECE;
- return setRouting(MODE_IN_CALL, routes, ROUTE_ALL);
-}
-
-status_t AudioSystem::isBluetoothScoOn(bool* state) {
- uint32_t routes = 0;
- status_t s = getRouting(MODE_IN_CALL, &routes);
- *state = !!(routes & ROUTE_BLUETOOTH_SCO);
- return s;
+ return gAudioFlinger;
}
status_t AudioSystem::muteMicrophone(bool state) {
@@ -148,12 +125,12 @@ status_t AudioSystem::getMasterMute(bool* mute)
return NO_ERROR;
}
-status_t AudioSystem::setStreamVolume(int stream, float value)
+status_t AudioSystem::setStreamVolume(int stream, float value, void *output)
{
if (uint32_t(stream) >= NUM_STREAM_TYPES) return BAD_VALUE;
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
- af->setStreamVolume(stream, value);
+ af->setStreamVolume(stream, value, output);
return NO_ERROR;
}
@@ -166,12 +143,12 @@ status_t AudioSystem::setStreamMute(int stream, bool mute)
return NO_ERROR;
}
-status_t AudioSystem::getStreamVolume(int stream, float* volume)
+status_t AudioSystem::getStreamVolume(int stream, float* volume, void *output)
{
if (uint32_t(stream) >= NUM_STREAM_TYPES) return BAD_VALUE;
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
- *volume = af->streamVolume(stream);
+ *volume = af->streamVolume(stream, output);
return NO_ERROR;
}
@@ -192,43 +169,28 @@ status_t AudioSystem::setMode(int mode)
return af->setMode(mode);
}
-status_t AudioSystem::getMode(int* mode)
-{
+
+status_t AudioSystem::isMusicActive(bool* state) {
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
- *mode = af->getMode();
+ *state = af->isMusicActive();
return NO_ERROR;
}
-status_t AudioSystem::setRouting(int mode, uint32_t routes, uint32_t mask)
-{
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- return af->setRouting(mode, routes, mask);
-}
-status_t AudioSystem::getRouting(int mode, uint32_t* routes)
-{
+status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
- uint32_t r = af->getRouting(mode);
- *routes = r;
- return NO_ERROR;
+ return af->setParameters(ioHandle, keyValuePairs);
}
-status_t AudioSystem::isMusicActive(bool* state) {
+String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- *state = af->isMusicActive();
- return NO_ERROR;
-}
+ String8 result = String8("");
+ if (af == 0) return result;
-// Temporary interface, do not use
-// TODO: Replace with a more generic key:value get/set mechanism
-status_t AudioSystem::setParameter(const char* key, const char* value) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- return af->setParameter(key, value);
+ result = af->getParameters(ioHandle, keys);
+ return result;
}
// convert volume steps to natural log scale
@@ -257,55 +219,108 @@ int AudioSystem::logToLinear(float volume)
status_t AudioSystem::getOutputSamplingRate(int* samplingRate, int streamType)
{
- int output = getOutput(streamType);
-
- if (output == NUM_AUDIO_OUTPUT_TYPES) return PERMISSION_DENIED;
+ OutputDescriptor *outputDesc;
+ audio_io_handle_t output;
+
+ if (streamType == DEFAULT) {
+ streamType = MUSIC;
+ }
+
+ output = getOutput((stream_type)streamType);
+ if (output == 0) {
+ return PERMISSION_DENIED;
+ }
+
+ gLock.lock();
+ outputDesc = AudioSystem::gOutputs.valueFor(output);
+ if (outputDesc == 0) {
+ LOGV("getOutputSamplingRate() no output descriptor for output %p in gOutputs", output);
+ gLock.unlock();
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+ *samplingRate = af->sampleRate(output);
+ } else {
+ LOGV("getOutputSamplingRate() reading from output desc");
+ *samplingRate = outputDesc->samplingRate;
+ gLock.unlock();
+ }
+
+ LOGV("getOutputSamplingRate() streamType %d, output %p, sampling rate %d", streamType, output, *samplingRate);
- // gOutSamplingRate[] is updated by getOutput() which calls get_audio_flinger()
- LOGV("getOutputSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, gOutSamplingRate[output]);
-
- *samplingRate = gOutSamplingRate[output];
-
return NO_ERROR;
}
status_t AudioSystem::getOutputFrameCount(int* frameCount, int streamType)
{
- int output = getOutput(streamType);
+ OutputDescriptor *outputDesc;
+ audio_io_handle_t output;
+
+ if (streamType == DEFAULT) {
+ streamType = MUSIC;
+ }
- if (output == NUM_AUDIO_OUTPUT_TYPES) return PERMISSION_DENIED;
+ output = getOutput((stream_type)streamType);
+ if (output == 0) {
+ return PERMISSION_DENIED;
+ }
+
+ gLock.lock();
+ outputDesc = AudioSystem::gOutputs.valueFor(output);
+ if (outputDesc == 0) {
+ gLock.unlock();
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+ *frameCount = af->frameCount(output);
+ } else {
+ *frameCount = outputDesc->frameCount;
+ gLock.unlock();
+ }
- // gOutFrameCount[] is updated by getOutput() which calls get_audio_flinger()
- LOGV("getOutputFrameCount() streamType %d, output %d, frame count %d", streamType, output, gOutFrameCount[output]);
+ LOGV("getOutputFrameCount() streamType %d, output %p, frameCount %d", streamType, output, *frameCount);
- *frameCount = gOutFrameCount[output];
-
return NO_ERROR;
}
status_t AudioSystem::getOutputLatency(uint32_t* latency, int streamType)
{
- int output = getOutput(streamType);
+ OutputDescriptor *outputDesc;
+ audio_io_handle_t output;
+
+ if (streamType == DEFAULT) {
+ streamType = MUSIC;
+ }
- if (output == NUM_AUDIO_OUTPUT_TYPES) return PERMISSION_DENIED;
+ output = getOutput((stream_type)streamType);
+ if (output == 0) {
+ return PERMISSION_DENIED;
+ }
+
+ gLock.lock();
+ outputDesc = AudioSystem::gOutputs.valueFor(output);
+ if (outputDesc == 0) {
+ gLock.unlock();
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+ *latency = af->latency(output);
+ } else {
+ *latency = outputDesc->latency;
+ gLock.unlock();
+ }
- // gOutLatency[] is updated by getOutput() which calls get_audio_flinger()
- LOGV("getOutputLatency() streamType %d, output %d, latency %d", streamType, output, gOutLatency[output]);
+ LOGV("getOutputLatency() streamType %d, output %p, latency %d", streamType, output, *latency);
- *latency = gOutLatency[output];
-
return NO_ERROR;
}
-status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, int format, int channelCount,
+status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, int format, int channelCount,
size_t* buffSize)
{
// Do we have a stale gInBufferSize or are we requesting the input buffer size for new values
- if ((gInBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat)
+ if ((gInBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat)
|| (channelCount != gPrevInChannelCount)) {
// save the request params
gPrevInSamplingRate = sampleRate;
- gPrevInFormat = format;
+ gPrevInFormat = format;
gPrevInChannelCount = channelCount;
gInBuffSize = 0;
@@ -314,24 +329,18 @@ status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, int format, int ch
return PERMISSION_DENIED;
}
gInBuffSize = af->getInputBufferSize(sampleRate, format, channelCount);
- }
+ }
*buffSize = gInBuffSize;
-
+
return NO_ERROR;
}
// ---------------------------------------------------------------------------
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) {
+void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) {
Mutex::Autolock _l(AudioSystem::gLock);
- AudioSystem::gAudioFlinger.clear();
- for (int output = 0; output < NUM_AUDIO_OUTPUT_TYPES; output++) {
- gOutFrameCount[output] = 0;
- gOutSamplingRate[output] = 0;
- gOutLatency[output] = 0;
- }
- AudioSystem::gInBuffSize = 0;
+ AudioSystem::gAudioFlinger.clear();
if (gAudioErrorCallback) {
gAudioErrorCallback(DEAD_OBJECT);
@@ -339,33 +348,83 @@ void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) {
LOGW("AudioFlinger server died!");
}
-void AudioSystem::AudioFlingerClient::a2dpEnabledChanged(bool enabled) {
- gA2dpEnabled = enabled;
- LOGV("AudioFlinger A2DP enabled status changed! %d", enabled);
-}
+void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, void *param1, void *param2) {
+ LOGV("ioConfigChanged() event %d", event);
+ audio_io_handle_t ioHandle = (audio_io_handle_t)param1;
+ OutputDescriptor *desc;
+ uint32_t stream;
+
+ if (param1 == 0) return;
-void AudioSystem::setErrorCallback(audio_error_callback cb) {
Mutex::Autolock _l(AudioSystem::gLock);
- gAudioErrorCallback = cb;
-}
-int AudioSystem::getOutput(int streamType)
-{
- // make sure that gA2dpEnabled is valid by calling get_audio_flinger() which in turn
- // will call gAudioFlinger->isA2dpEnabled()
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return NUM_AUDIO_OUTPUT_TYPES;
+ switch (event) {
+ case STREAM_CONFIG_CHANGED:
+ if (param2 == 0) break;
+ stream = *(uint32_t *)param2;
+ LOGV("ioConfigChanged() STREAM_CONFIG_CHANGED stream %d, output %p", stream, ioHandle);
+ if (gStreamOutputMap.indexOfKey(stream) >= 0) {
+ gStreamOutputMap.replaceValueFor(stream, ioHandle);
+ }
+ break;
+ case OUTPUT_OPENED: {
+ if (gOutputs.indexOfKey(ioHandle) >= 0) {
+ LOGV("ioConfigChanged() opening already existing output! %p", ioHandle);
+ break;
+ }
+ if (param2 == 0) break;
+ desc = (OutputDescriptor *)param2;
+
+ OutputDescriptor *outputDesc = new OutputDescriptor(*desc);
+ gOutputs.add(ioHandle, outputDesc);
+ LOGV("ioConfigChanged() new output samplingRate %d, format %d channels %d frameCount %d latency %d",
+ outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency);
+ } break;
+ case OUTPUT_CLOSED: {
+ if (gOutputs.indexOfKey(ioHandle) < 0) {
+ LOGW("ioConfigChanged() closing unknow output! %p", ioHandle);
+ break;
+ }
+ LOGV("ioConfigChanged() output %p closed", ioHandle);
+
+ gOutputs.removeItem(ioHandle);
+ for (int i = gStreamOutputMap.size() - 1; i >= 0 ; i--) {
+ if (gStreamOutputMap.valueAt(i) == ioHandle) {
+ gStreamOutputMap.removeItemsAt(i);
+ }
+ }
+ } break;
+
+ case OUTPUT_CONFIG_CHANGED: {
+ int index = gOutputs.indexOfKey(ioHandle);
+ if (index < 0) {
+ LOGW("ioConfigChanged() modifying unknow output! %p", ioHandle);
+ break;
+ }
+ if (param2 == 0) break;
+ desc = (OutputDescriptor *)param2;
+
+ LOGV("ioConfigChanged() new config for output %p samplingRate %d, format %d channels %d frameCount %d latency %d",
+ ioHandle, desc->samplingRate, desc->format,
+ desc->channels, desc->frameCount, desc->latency);
+ OutputDescriptor *outputDesc = gOutputs.valueAt(index);
+ delete outputDesc;
+ outputDesc = new OutputDescriptor(*desc);
+ gOutputs.replaceValueFor(ioHandle, outputDesc);
+ } break;
+ case INPUT_OPENED:
+ case INPUT_CLOSED:
+ case INPUT_CONFIG_CHANGED:
+ break;
- if (streamType == DEFAULT) {
- streamType = MUSIC;
- }
- if (gA2dpEnabled && routedToA2dpOutput(streamType)) {
- return AUDIO_OUTPUT_A2DP;
- } else {
- return AUDIO_OUTPUT_HARDWARE;
}
}
+void AudioSystem::setErrorCallback(audio_error_callback cb) {
+ Mutex::Autolock _l(gLock);
+ gAudioErrorCallback = cb;
+}
+
bool AudioSystem::routedToA2dpOutput(int streamType) {
switch(streamType) {
case MUSIC:
@@ -379,6 +438,451 @@ bool AudioSystem::routedToA2dpOutput(int streamType) {
}
+// client singleton for AudioPolicyService binder interface
+sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
+sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
+
+
+// establish binder interface to AudioPolicyService
+const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
+{
+ gLock.lock();
+ if (gAudioPolicyService.get() == 0) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder;
+ do {
+ binder = sm->getService(String16("media.audio_policy"));
+ if (binder != 0)
+ break;
+ LOGW("AudioPolicyService not published, waiting...");
+ usleep(500000); // 0.5 s
+ } while(true);
+ if (gAudioPolicyServiceClient == NULL) {
+ gAudioPolicyServiceClient = new AudioPolicyServiceClient();
+ }
+ binder->linkToDeath(gAudioPolicyServiceClient);
+ gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
+ gLock.unlock();
+ } else {
+ gLock.unlock();
+ }
+ return gAudioPolicyService;
+}
+
+status_t AudioSystem::setDeviceConnectionState(audio_devices device,
+ device_connection_state state,
+ const char *device_address)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+
+ return aps->setDeviceConnectionState(device, state, device_address);
+}
+
+AudioSystem::device_connection_state AudioSystem::getDeviceConnectionState(audio_devices device,
+ const char *device_address)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return DEVICE_STATE_UNAVAILABLE;
+
+ return aps->getDeviceConnectionState(device, device_address);
+}
+
+status_t AudioSystem::setPhoneState(int state)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+
+ return aps->setPhoneState(state);
+}
+
+status_t AudioSystem::setRingerMode(uint32_t mode, uint32_t mask)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setRingerMode(mode, mask);
+}
+
+status_t AudioSystem::setForceUse(force_use usage, forced_config config)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setForceUse(usage, config);
+}
+
+AudioSystem::forced_config AudioSystem::getForceUse(force_use usage)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return FORCE_NONE;
+ return aps->getForceUse(usage);
+}
+
+
+audio_io_handle_t AudioSystem::getOutput(stream_type stream,
+ uint32_t samplingRate,
+ uint32_t format,
+ uint32_t channels,
+ output_flags flags)
+{
+ audio_io_handle_t output = NULL;
+ if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) == 0) {
+ Mutex::Autolock _l(gLock);
+ output = AudioSystem::gStreamOutputMap.valueFor(stream);
+ LOGV_IF((output != NULL), "getOutput() read %p from cache for stream %d", output, stream);
+ }
+ if (output == NULL) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return NULL;
+ output = aps->getOutput(stream, samplingRate, format, channels, flags);
+ if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) == 0) {
+ Mutex::Autolock _l(gLock);
+ AudioSystem::gStreamOutputMap.add(stream, output);
+ }
+ }
+ return output;
+}
+
+status_t AudioSystem::startOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->startOutput(output, stream);
+}
+
+status_t AudioSystem::stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->stopOutput(output, stream);
+}
+
+void AudioSystem::releaseOutput(audio_io_handle_t output)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return;
+ aps->releaseOutput(output);
+}
+
+audio_io_handle_t AudioSystem::getInput(int inputSource,
+ uint32_t samplingRate,
+ uint32_t format,
+ uint32_t channels,
+ audio_in_acoustics acoustics)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return NULL;
+ return aps->getInput(inputSource, samplingRate, format, channels, acoustics);
+}
+
+status_t AudioSystem::startInput(audio_io_handle_t input)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->startInput(input);
+}
+
+status_t AudioSystem::stopInput(audio_io_handle_t input)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->stopInput(input);
+}
+
+void AudioSystem::releaseInput(audio_io_handle_t input)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return;
+ aps->releaseInput(input);
+}
+
+status_t AudioSystem::initStreamVolume(stream_type stream,
+ int indexMin,
+ int indexMax)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->initStreamVolume(stream, indexMin, indexMax);
+}
+
+status_t AudioSystem::setStreamVolumeIndex(stream_type stream, int index)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setStreamVolumeIndex(stream, index);
+}
+
+status_t AudioSystem::getStreamVolumeIndex(stream_type stream, int *index)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->getStreamVolumeIndex(stream, index);
+}
+
+// ---------------------------------------------------------------------------
+
+void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
+ Mutex::Autolock _l(AudioSystem::gLock);
+ AudioSystem::gAudioPolicyService.clear();
+
+ LOGW("AudioPolicyService server died!");
+}
+
+// ---------------------------------------------------------------------------
+
+
+// use emulated popcount optimization
+// http://www.df.lth.se/~john_e/gems/gem002d.html
+uint32_t AudioSystem::popCount(uint32_t u)
+{
+ u = ((u&0x55555555) + ((u>>1)&0x55555555));
+ u = ((u&0x33333333) + ((u>>2)&0x33333333));
+ u = ((u&0x0f0f0f0f) + ((u>>4)&0x0f0f0f0f));
+ u = ((u&0x00ff00ff) + ((u>>8)&0x00ff00ff));
+ u = ( u&0x0000ffff) + (u>>16);
+ return u;
+}
+
+bool AudioSystem::isOutputDevice(audio_devices device)
+{
+ if ((popCount(device) == 1 ) &&
+ ((device & ~AudioSystem::DEVICE_OUT_ALL) == 0)) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isInputDevice(audio_devices device)
+{
+ if ((popCount(device) == 1 ) &&
+ ((device & ~AudioSystem::DEVICE_IN_ALL) == 0)) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isA2dpDevice(audio_devices device)
+{
+ if ((popCount(device) == 1 ) &&
+ (device & (AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP |
+ AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+ AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER))) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isBluetoothScoDevice(audio_devices device)
+{
+ if ((popCount(device) == 1 ) &&
+ (device & (AudioSystem::DEVICE_OUT_BLUETOOTH_SCO |
+ AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+ AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT))) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isLowVisibility(stream_type stream)
+{
+ if (stream == AudioSystem::SYSTEM || stream == AudioSystem::NOTIFICATION) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isInputChannel(uint32_t channel)
+{
+ if ((channel & ~AudioSystem::CHANNEL_IN_ALL) == 0) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isOutputChannel(uint32_t channel)
+{
+ if ((channel & ~AudioSystem::CHANNEL_OUT_ALL) == 0) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool AudioSystem::isValidFormat(uint32_t format)
+{
+ switch (format & MAIN_FORMAT_MASK) {
+ case PCM:
+ case MP3:
+ case AMR_NB:
+ case AMR_WB:
+ case AAC:
+ case HE_AAC_V1:
+ case HE_AAC_V2:
+ case VORBIS:
+ return true;
+ default:
+ return false;
+ }
+}
+
+bool AudioSystem::isLinearPCM(uint32_t format)
+{
+ switch (format) {
+ case PCM_16_BIT:
+ case PCM_8_BIT:
+ return true;
+ default:
+ return false;
+ }
+}
+
+//------------------------- AudioParameter class implementation ---------------
+
+const char *AudioParameter::keyRouting = "routing";
+const char *AudioParameter::keySamplingRate = "sampling_rate";
+const char *AudioParameter::keyFormat = "format";
+const char *AudioParameter::keyChannels = "channels";
+const char *AudioParameter::keyFrameCount = "frame_count";
+
+AudioParameter::AudioParameter(const String8& keyValuePairs)
+{
+ char *str = new char[keyValuePairs.length()+1];
+ mKeyValuePairs = keyValuePairs;
+
+ strcpy(str, keyValuePairs.string());
+ char *pair = strtok(str, ";");
+ while (pair != NULL) {
+ if (strlen(pair) != 0) {
+ size_t eqIdx = strcspn(pair, "=");
+ String8 key = String8(pair, eqIdx);
+ String8 value;
+ if (eqIdx == strlen(pair)) {
+ value = String8("");
+ } else {
+ value = String8(pair + eqIdx + 1);
+ }
+ if (mParameters.indexOfKey(key) < 0) {
+ mParameters.add(key, value);
+ } else {
+ mParameters.replaceValueFor(key, value);
+ }
+ } else {
+ LOGV("AudioParameter() cstor empty key value pair");
+ }
+ pair = strtok(NULL, ";");
+ }
+
+ delete[] str;
+}
+
+AudioParameter::~AudioParameter()
+{
+ mParameters.clear();
+}
+
+String8 AudioParameter::toString()
+{
+ String8 str = String8("");
+
+ size_t size = mParameters.size();
+ for (size_t i = 0; i < size; i++) {
+ str += mParameters.keyAt(i);
+ str += "=";
+ str += mParameters.valueAt(i);
+ if (i < (size - 1)) str += ";";
+ }
+ return str;
+}
+
+status_t AudioParameter::add(const String8& key, const String8& value)
+{
+ if (mParameters.indexOfKey(key) < 0) {
+ mParameters.add(key, value);
+ return NO_ERROR;
+ } else {
+ mParameters.replaceValueFor(key, value);
+ return ALREADY_EXISTS;
+ }
+}
+
+status_t AudioParameter::addInt(const String8& key, const int value)
+{
+ char str[12];
+ if (snprintf(str, 12, "%d", value) > 0) {
+ String8 str8 = String8(str);
+ return add(key, str8);
+ } else {
+ return BAD_VALUE;
+ }
+}
+
+status_t AudioParameter::addFloat(const String8& key, const float value)
+{
+ char str[23];
+ if (snprintf(str, 23, "%.10f", value) > 0) {
+ String8 str8 = String8(str);
+ return add(key, str8);
+ } else {
+ return BAD_VALUE;
+ }
+}
+
+status_t AudioParameter::remove(const String8& key)
+{
+ if (mParameters.indexOfKey(key) >= 0) {
+ mParameters.removeItem(key);
+ return NO_ERROR;
+ } else {
+ return BAD_VALUE;
+ }
+}
+
+status_t AudioParameter::get(const String8& key, String8& value)
+{
+ if (mParameters.indexOfKey(key) >= 0) {
+ value = mParameters.valueFor(key);
+ return NO_ERROR;
+ } else {
+ return BAD_VALUE;
+ }
+}
+
+status_t AudioParameter::getInt(const String8& key, int& value)
+{
+ String8 str8;
+ status_t result = get(key, str8);
+ value = 0;
+ if (result == NO_ERROR) {
+ int val;
+ if (sscanf(str8.string(), "%d", &val) == 1) {
+ value = val;
+ } else {
+ result = INVALID_OPERATION;
+ }
+ }
+ return result;
+}
+
+status_t AudioParameter::getFloat(const String8& key, float& value)
+{
+ String8 str8;
+ status_t result = get(key, str8);
+ value = 0;
+ if (result == NO_ERROR) {
+ float val;
+ if (sscanf(str8.string(), "%f", &val) == 1) {
+ value = val;
+ } else {
+ result = INVALID_OPERATION;
+ }
+ }
+ return result;
+}
}; // namespace android
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index b2c067b..b147d25 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -32,9 +32,9 @@
#include <media/AudioTrack.h>
#include <utils/Log.h>
-#include <utils/MemoryDealer.h>
-#include <utils/Parcel.h>
-#include <utils/IPCThreadState.h>
+#include <binder/MemoryDealer.h>
+#include <binder/Parcel.h>
+#include <binder/IPCThreadState.h>
#include <utils/Timers.h>
#include <cutils/atomic.h>
@@ -54,7 +54,7 @@ AudioTrack::AudioTrack(
int streamType,
uint32_t sampleRate,
int format,
- int channelCount,
+ int channels,
int frameCount,
uint32_t flags,
callback_t cbf,
@@ -62,7 +62,7 @@ AudioTrack::AudioTrack(
int notificationFrames)
: mStatus(NO_INIT)
{
- mStatus = set(streamType, sampleRate, format, channelCount,
+ mStatus = set(streamType, sampleRate, format, channels,
frameCount, flags, cbf, user, notificationFrames, 0);
}
@@ -70,7 +70,7 @@ AudioTrack::AudioTrack(
int streamType,
uint32_t sampleRate,
int format,
- int channelCount,
+ int channels,
const sp<IMemory>& sharedBuffer,
uint32_t flags,
callback_t cbf,
@@ -78,7 +78,7 @@ AudioTrack::AudioTrack(
int notificationFrames)
: mStatus(NO_INIT)
{
- mStatus = set(streamType, sampleRate, format, channelCount,
+ mStatus = set(streamType, sampleRate, format, channels,
0, flags, cbf, user, notificationFrames, sharedBuffer);
}
@@ -97,6 +97,7 @@ AudioTrack::~AudioTrack()
}
mAudioTrack.clear();
IPCThreadState::self()->flushCommands();
+ AudioSystem::releaseOutput(getOutput());
}
}
@@ -104,7 +105,7 @@ status_t AudioTrack::set(
int streamType,
uint32_t sampleRate,
int format,
- int channelCount,
+ int channels,
int frameCount,
uint32_t flags,
callback_t cbf,
@@ -150,63 +151,84 @@ status_t AudioTrack::set(
if (format == 0) {
format = AudioSystem::PCM_16_BIT;
}
- if (channelCount == 0) {
- channelCount = 2;
+ if (channels == 0) {
+ channels = AudioSystem::CHANNEL_OUT_STEREO;
}
// validate parameters
- if (((format != AudioSystem::PCM_8_BIT) || sharedBuffer != 0) &&
- (format != AudioSystem::PCM_16_BIT)) {
+ if (!AudioSystem::isValidFormat(format)) {
LOGE("Invalid format");
return BAD_VALUE;
}
- if (channelCount != 1 && channelCount != 2) {
- LOGE("Invalid channel number");
+
+ // force direct flag if format is not linear PCM
+ if (!AudioSystem::isLinearPCM(format)) {
+ flags |= AudioSystem::OUTPUT_FLAG_DIRECT;
+ }
+
+ if (!AudioSystem::isOutputChannel(channels)) {
+ LOGE("Invalid channel mask");
return BAD_VALUE;
}
+ uint32_t channelCount = AudioSystem::popCount(channels);
- // Ensure that buffer depth covers at least audio hardware latency
- uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
- if (minBufCount < 2) minBufCount = 2;
+ audio_io_handle_t output = AudioSystem::getOutput((AudioSystem::stream_type)streamType,
+ sampleRate, format, channels, (AudioSystem::output_flags)flags);
- // When playing from shared buffer, playback will start even if last audioflinger
- // block is partly filled.
- if (sharedBuffer != 0 && minBufCount > 1) {
- minBufCount--;
+ if (output == 0) {
+ LOGE("Could not get audio output for stream type %d", streamType);
+ return BAD_VALUE;
}
- int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
-
- if (sharedBuffer == 0) {
- if (frameCount == 0) {
- frameCount = minFrameCount;
- }
- if (notificationFrames == 0) {
- notificationFrames = frameCount/2;
- }
- // Make sure that application is notified with sufficient margin
- // before underrun
- if (notificationFrames > frameCount/2) {
- notificationFrames = frameCount/2;
+ if (!AudioSystem::isLinearPCM(format)) {
+ if (sharedBuffer != 0) {
+ frameCount = sharedBuffer->size();
}
} else {
- // Ensure that buffer alignment matches channelcount
- if (((uint32_t)sharedBuffer->pointer() & (channelCount | 1)) != 0) {
- LOGE("Invalid buffer alignement: address %p, channelCount %d", sharedBuffer->pointer(), channelCount);
- return BAD_VALUE;
- }
- frameCount = sharedBuffer->size()/channelCount/sizeof(int16_t);
- }
+ // Ensure that buffer depth covers at least audio hardware latency
+ uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
+ if (minBufCount < 2) minBufCount = 2;
- if (frameCount < minFrameCount) {
- LOGE("Invalid buffer size: minFrameCount %d, frameCount %d", minFrameCount, frameCount);
- return BAD_VALUE;
+ int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
+
+ if (sharedBuffer == 0) {
+ if (frameCount == 0) {
+ frameCount = minFrameCount;
+ }
+ if (notificationFrames == 0) {
+ notificationFrames = frameCount/2;
+ }
+ // Make sure that application is notified with sufficient margin
+ // before underrun
+ if (notificationFrames > frameCount/2) {
+ notificationFrames = frameCount/2;
+ }
+ if (frameCount < minFrameCount) {
+ LOGE("Invalid buffer size: minFrameCount %d, frameCount %d", minFrameCount, frameCount);
+ return BAD_VALUE;
+ }
+ } else {
+ // Ensure that buffer alignment matches channelcount
+ if (((uint32_t)sharedBuffer->pointer() & (channelCount | 1)) != 0) {
+ LOGE("Invalid buffer alignement: address %p, channelCount %d", sharedBuffer->pointer(), channelCount);
+ return BAD_VALUE;
+ }
+ frameCount = sharedBuffer->size()/channelCount/sizeof(int16_t);
+ }
}
// create the track
status_t status;
sp<IAudioTrack> track = audioFlinger->createTrack(getpid(),
- streamType, sampleRate, format, channelCount, frameCount, flags, sharedBuffer, &status);
+ streamType,
+ sampleRate,
+ format,
+ channelCount,
+ frameCount,
+ ((uint16_t)flags) << 16,
+ sharedBuffer,
+ output,
+ &status);
if (track == 0) {
LOGE("AudioFlinger could not create track, status: %d", status);
@@ -245,6 +267,7 @@ status_t AudioTrack::set(
mVolume[RIGHT] = 1.0f;
mStreamType = streamType;
mFormat = format;
+ mChannels = channels;
mChannelCount = channelCount;
mSharedBuffer = sharedBuffer;
mMuted = false;
@@ -259,6 +282,7 @@ status_t AudioTrack::set(
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
+ mFlags = flags;
return NO_ERROR;
}
@@ -297,7 +321,11 @@ uint32_t AudioTrack::frameCount() const
int AudioTrack::frameSize() const
{
- return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t));
+ if (AudioSystem::isLinearPCM(mFormat)) {
+ return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t));
+ } else {
+ return sizeof(uint8_t);
+ }
}
sp<IMemory>& AudioTrack::sharedBuffer()
@@ -323,6 +351,7 @@ void AudioTrack::start()
}
if (android_atomic_or(1, &mActive) == 0) {
+ AudioSystem::startOutput(getOutput(), (AudioSystem::stream_type)mStreamType);
mNewPosition = mCblk->server + mUpdatePeriod;
mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS;
mCblk->waitTimeMs = 0;
@@ -367,6 +396,7 @@ void AudioTrack::stop()
} else {
setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_NORMAL);
}
+ AudioSystem::stopOutput(getOutput(), (AudioSystem::stream_type)mStreamType);
}
if (t != 0) {
@@ -382,12 +412,12 @@ bool AudioTrack::stopped() const
void AudioTrack::flush()
{
LOGV("flush");
-
+
// clear playback marker and periodic update counter
mMarkerPosition = 0;
mMarkerReached = false;
mUpdatePeriod = 0;
-
+
if (!mActive) {
mAudioTrack->flush();
@@ -403,6 +433,7 @@ void AudioTrack::pause()
if (android_atomic_and(~1, &mActive) == 1) {
mActive = 0;
mAudioTrack->pause();
+ AudioSystem::stopOutput(getOutput(), (AudioSystem::stream_type)mStreamType);
}
}
@@ -455,7 +486,6 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount
{
audio_track_cblk_t* cblk = mCblk;
-
Mutex::Autolock _l(cblk->lock);
if (loopCount == 0) {
@@ -476,7 +506,7 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount
LOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, framecount %d",
loopStart, loopEnd, mFrameCount);
return BAD_VALUE;
- }
+ }
cblk->loopStart = loopStart;
cblk->loopEnd = loopEnd;
@@ -555,7 +585,7 @@ status_t AudioTrack::setPosition(uint32_t position)
mCblk->server = position;
mCblk->forceReady = 1;
-
+
return NO_ERROR;
}
@@ -571,7 +601,7 @@ status_t AudioTrack::getPosition(uint32_t *position)
status_t AudioTrack::reload()
{
if (!stopped()) return INVALID_OPERATION;
-
+
flush();
mCblk->stepUser(mFrameCount);
@@ -579,6 +609,12 @@ status_t AudioTrack::reload()
return NO_ERROR;
}
+audio_io_handle_t AudioTrack::getOutput()
+{
+ return AudioSystem::getOutput((AudioSystem::stream_type)mStreamType,
+ mCblk->sampleRate, mFormat, mChannels, (AudioSystem::output_flags)mFlags);
+}
+
// -------------------------------------------------------------------------
status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -608,7 +644,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
return WOULD_BLOCK;
timeout = 0;
result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs));
- if (__builtin_expect(result!=NO_ERROR, false)) {
+ if (__builtin_expect(result!=NO_ERROR, false)) {
cblk->waitTimeMs += waitTimeMs;
if (cblk->waitTimeMs >= cblk->bufferTimeoutMs) {
// timing out when a loop has been set and we have already written upto loop end
@@ -616,7 +652,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
if (cblk->user < cblk->loopEnd) {
LOGW( "obtainBuffer timed out (is the CPU pegged?) %p "
"user=%08x, server=%08x", this, cblk->user, cblk->server);
- //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140)
+ //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140)
cblk->lock.unlock();
mAudioTrack->start();
cblk->lock.lock();
@@ -624,7 +660,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
}
cblk->waitTimeMs = 0;
}
-
+
if (--waitCount == 0) {
return TIMED_OUT;
}
@@ -636,7 +672,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
}
cblk->waitTimeMs = 0;
-
+
if (framesReq > framesAvail) {
framesReq = framesAvail;
}
@@ -653,12 +689,16 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
"but didn't need to be locked. We recovered, but "
"this shouldn't happen (user=%08x, server=%08x)", cblk->user, cblk->server);
- audioBuffer->flags = mMuted ? Buffer::MUTE : 0;
- audioBuffer->channelCount= mChannelCount;
- audioBuffer->format = AudioSystem::PCM_16_BIT;
- audioBuffer->frameCount = framesReq;
- audioBuffer->size = framesReq*mChannelCount*sizeof(int16_t);
- audioBuffer->raw = (int8_t *)cblk->buffer(u);
+ audioBuffer->flags = mMuted ? Buffer::MUTE : 0;
+ audioBuffer->channelCount = mChannelCount;
+ audioBuffer->frameCount = framesReq;
+ audioBuffer->size = framesReq * cblk->frameSize;
+ if (AudioSystem::isLinearPCM(mFormat)) {
+ audioBuffer->format = AudioSystem::PCM_16_BIT;
+ } else {
+ audioBuffer->format = mFormat;
+ }
+ audioBuffer->raw = (int8_t *)cblk->buffer(u);
active = mActive;
return active ? status_t(NO_ERROR) : status_t(STOPPED);
}
@@ -690,10 +730,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize)
Buffer audioBuffer;
do {
- audioBuffer.frameCount = userSize/mChannelCount;
- if (mFormat == AudioSystem::PCM_16_BIT) {
- audioBuffer.frameCount >>= 1;
- }
+ audioBuffer.frameCount = userSize/frameSize();
+
// Calling obtainBuffer() with a negative wait count causes
// an (almost) infinite wait time.
status_t err = obtainBuffer(&audioBuffer, -1);
@@ -705,6 +743,7 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize)
}
size_t toWrite;
+
if (mFormat == AudioSystem::PCM_8_BIT) {
// Divide capacity by 2 to take expansion into account
toWrite = audioBuffer.size>>1;
@@ -742,13 +781,13 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
if (mCblk->flowControlFlag == 0) {
mCbf(EVENT_UNDERRUN, mUserData, 0);
if (mCblk->server == mCblk->frameCount) {
- mCbf(EVENT_BUFFER_END, mUserData, 0);
+ mCbf(EVENT_BUFFER_END, mUserData, 0);
}
mCblk->flowControlFlag = 1;
if (mSharedBuffer != 0) return false;
}
}
-
+
// Manage loop end callback
while (mLoopCount > mCblk->loopCount) {
int loopCount = -1;
@@ -767,7 +806,7 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
}
// Manage new position callback
- if(mUpdatePeriod > 0) {
+ if (mUpdatePeriod > 0) {
while (mCblk->server >= mNewPosition) {
mCbf(EVENT_NEW_POS, mUserData, (void *)&mNewPosition);
mNewPosition += mUpdatePeriod;
@@ -784,10 +823,10 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
do {
audioBuffer.frameCount = frames;
-
- // Calling obtainBuffer() with a wait count of 1
- // limits wait time to WAIT_PERIOD_MS. This prevents from being
- // stuck here not being able to handle timed events (position, markers, loops).
+
+ // Calling obtainBuffer() with a wait count of 1
+ // limits wait time to WAIT_PERIOD_MS. This prevents from being
+ // stuck here not being able to handle timed events (position, markers, loops).
status_t err = obtainBuffer(&audioBuffer, 1);
if (err < NO_ERROR) {
if (err != TIMED_OUT) {
@@ -832,7 +871,11 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
}
audioBuffer.size = writtenSize;
- audioBuffer.frameCount = writtenSize/mChannelCount/sizeof(int16_t);
+ // NOTE: mCblk->frameSize is not equal to AudioTrack::frameSize() for
+ // 8 bit PCM data: in this case, mCblk->frameSize is based on a sample size of
+ // 16 bit.
+ audioBuffer.frameCount = writtenSize/mCblk->frameSize;
+
frames -= audioBuffer.frameCount;
releaseBuffer(&audioBuffer);
@@ -891,7 +934,7 @@ void AudioTrack::AudioTrackThread::onFirstRef()
// =========================================================================
audio_track_cblk_t::audio_track_cblk_t()
- : user(0), server(0), userBase(0), serverBase(0), buffers(0), frameCount(0),
+ : lock(Mutex::SHARED), user(0), server(0), userBase(0), serverBase(0), buffers(0), frameCount(0),
loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), volumeLR(0), flowControlFlag(1), forceReady(0)
{
}
@@ -949,7 +992,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount)
// we switch to normal obtainBuffer() timeout period
if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) {
bufferTimeoutMs = MAX_RUN_TIMEOUT_MS - 1;
- }
+ }
// It is possible that we receive a flush()
// while the mixer is processing a block: in this case,
// stepServer() is called After the flush() has reset u & s and
@@ -981,7 +1024,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount)
void* audio_track_cblk_t::buffer(uint32_t offset) const
{
- return (int16_t *)this->buffers + (offset-userBase)*this->channels;
+ return (int8_t *)this->buffers + (offset - userBase) * this->frameSize;
}
uint32_t audio_track_cblk_t::framesAvailable()
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index eeaa54f..9385367 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -16,12 +16,13 @@
*/
#define LOG_TAG "IAudioFlinger"
+//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <media/IAudioFlinger.h>
@@ -44,17 +45,21 @@ enum {
STREAM_VOLUME,
STREAM_MUTE,
SET_MODE,
- GET_MODE,
- SET_ROUTING,
- GET_ROUTING,
SET_MIC_MUTE,
GET_MIC_MUTE,
IS_MUSIC_ACTIVE,
- SET_PARAMETER,
+ SET_PARAMETERS,
+ GET_PARAMETERS,
REGISTER_CLIENT,
GET_INPUTBUFFERSIZE,
- WAKE_UP,
- IS_A2DP_ENABLED
+ OPEN_OUTPUT,
+ OPEN_DUPLICATE_OUTPUT,
+ CLOSE_OUTPUT,
+ SUSPEND_OUTPUT,
+ RESTORE_OUTPUT,
+ OPEN_INPUT,
+ CLOSE_INPUT,
+ SET_STREAM_OUTPUT
};
class BpAudioFlinger : public BpInterface<IAudioFlinger>
@@ -74,6 +79,7 @@ public:
int frameCount,
uint32_t flags,
const sp<IMemory>& sharedBuffer,
+ void *output,
status_t *status)
{
Parcel data, reply;
@@ -86,6 +92,7 @@ public:
data.writeInt32(frameCount);
data.writeInt32(flags);
data.writeStrongBinder(sharedBuffer->asBinder());
+ data.write(&output, sizeof(void *));
status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
if (lStatus != NO_ERROR) {
LOGE("createTrack error: %s", strerror(-lStatus));
@@ -99,7 +106,7 @@ public:
virtual sp<IAudioRecord> openRecord(
pid_t pid,
- int inputSource,
+ void *input,
uint32_t sampleRate,
int format,
int channelCount,
@@ -110,7 +117,7 @@ public:
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(pid);
- data.writeInt32(inputSource);
+ data.write(&input, sizeof(void *));
data.writeInt32(sampleRate);
data.writeInt32(format);
data.writeInt32(channelCount);
@@ -124,47 +131,47 @@ public:
return interface_cast<IAudioRecord>(reply.readStrongBinder());
}
- virtual uint32_t sampleRate(int output) const
+ virtual uint32_t sampleRate(void *output) const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(output);
+ data.write(&output, sizeof(void *));
remote()->transact(SAMPLE_RATE, data, &reply);
return reply.readInt32();
}
- virtual int channelCount(int output) const
+ virtual int channelCount(void *output) const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(output);
+ data.write(&output, sizeof(void *));
remote()->transact(CHANNEL_COUNT, data, &reply);
return reply.readInt32();
}
- virtual int format(int output) const
+ virtual int format(void *output) const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(output);
+ data.write(&output, sizeof(void *));
remote()->transact(FORMAT, data, &reply);
return reply.readInt32();
}
- virtual size_t frameCount(int output) const
+ virtual size_t frameCount(void *output) const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(output);
+ data.write(&output, sizeof(void *));
remote()->transact(FRAME_COUNT, data, &reply);
return reply.readInt32();
}
- virtual uint32_t latency(int output) const
+ virtual uint32_t latency(void *output) const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(output);
+ data.write(&output, sizeof(void *));
remote()->transact(LATENCY, data, &reply);
return reply.readInt32();
}
@@ -203,12 +210,13 @@ public:
return reply.readInt32();
}
- virtual status_t setStreamVolume(int stream, float value)
+ virtual status_t setStreamVolume(int stream, float value, void *output)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(stream);
data.writeFloat(value);
+ data.write(&output, sizeof(void *));
remote()->transact(SET_STREAM_VOLUME, data, &reply);
return reply.readInt32();
}
@@ -223,11 +231,12 @@ public:
return reply.readInt32();
}
- virtual float streamVolume(int stream) const
+ virtual float streamVolume(int stream, void *output) const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(stream);
+ data.write(&output, sizeof(void *));
remote()->transact(STREAM_VOLUME, data, &reply);
return reply.readFloat();
}
@@ -241,111 +250,205 @@ public:
return reply.readInt32();
}
- virtual status_t setRouting(int mode, uint32_t routes, uint32_t mask)
+ virtual status_t setMode(int mode)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(mode);
- data.writeInt32(routes);
- data.writeInt32(mask);
- remote()->transact(SET_ROUTING, data, &reply);
+ remote()->transact(SET_MODE, data, &reply);
return reply.readInt32();
}
- virtual uint32_t getRouting(int mode) const
+ virtual status_t setMicMute(bool state)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(mode);
- remote()->transact(GET_ROUTING, data, &reply);
+ data.writeInt32(state);
+ remote()->transact(SET_MIC_MUTE, data, &reply);
return reply.readInt32();
}
- virtual status_t setMode(int mode)
+ virtual bool getMicMute() const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(mode);
- remote()->transact(SET_MODE, data, &reply);
+ remote()->transact(GET_MIC_MUTE, data, &reply);
return reply.readInt32();
}
- virtual int getMode() const
+ virtual bool isMusicActive() const
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(GET_MODE, data, &reply);
+ remote()->transact(IS_MUSIC_ACTIVE, data, &reply);
return reply.readInt32();
}
- virtual status_t setMicMute(bool state)
+ virtual status_t setParameters(void *ioHandle, const String8& keyValuePairs)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(state);
- remote()->transact(SET_MIC_MUTE, data, &reply);
+ data.write(&ioHandle, sizeof(void *));
+ data.writeString8(keyValuePairs);
+ remote()->transact(SET_PARAMETERS, data, &reply);
return reply.readInt32();
}
- virtual bool getMicMute() const
+ virtual String8 getParameters(void *ioHandle, const String8& keys)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(GET_MIC_MUTE, data, &reply);
+ data.write(&ioHandle, sizeof(void *));
+ data.writeString8(keys);
+ remote()->transact(GET_PARAMETERS, data, &reply);
+ return reply.readString8();
+ }
+
+ virtual void registerClient(const sp<IAudioFlingerClient>& client)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeStrongBinder(client->asBinder());
+ remote()->transact(REGISTER_CLIENT, data, &reply);
+ }
+
+ virtual size_t getInputBufferSize(uint32_t sampleRate, int format, int channelCount)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(sampleRate);
+ data.writeInt32(format);
+ data.writeInt32(channelCount);
+ remote()->transact(GET_INPUTBUFFERSIZE, data, &reply);
return reply.readInt32();
}
- virtual bool isMusicActive() const
+ virtual void *openOutput(uint32_t *pDevices,
+ uint32_t *pSamplingRate,
+ uint32_t *pFormat,
+ uint32_t *pChannels,
+ uint32_t *pLatencyMs,
+ uint32_t flags)
{
Parcel data, reply;
+ uint32_t devices = pDevices ? *pDevices : 0;
+ uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0;
+ uint32_t format = pFormat ? *pFormat : 0;
+ uint32_t channels = pChannels ? *pChannels : 0;
+ uint32_t latency = pLatencyMs ? *pLatencyMs : 0;
+
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(IS_MUSIC_ACTIVE, data, &reply);
+ data.writeInt32(devices);
+ data.writeInt32(samplingRate);
+ data.writeInt32(format);
+ data.writeInt32(channels);
+ data.writeInt32(latency);
+ data.writeInt32(flags);
+ remote()->transact(OPEN_OUTPUT, data, &reply);
+ void *output;
+ reply.read(&output, sizeof(void *));
+ LOGV("openOutput() returned output, %p", output);
+ devices = reply.readInt32();
+ if (pDevices) *pDevices = devices;
+ samplingRate = reply.readInt32();
+ if (pSamplingRate) *pSamplingRate = samplingRate;
+ format = reply.readInt32();
+ if (pFormat) *pFormat = format;
+ channels = reply.readInt32();
+ if (pChannels) *pChannels = channels;
+ latency = reply.readInt32();
+ if (pLatencyMs) *pLatencyMs = latency;
+ return output;
+ }
+
+ virtual void *openDuplicateOutput(void *output1, void *output2)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(&output1, sizeof(void *));
+ data.write(&output2, sizeof(void *));
+ remote()->transact(OPEN_DUPLICATE_OUTPUT, data, &reply);
+ void *output;
+ reply.read(&output, sizeof(void *));
+ return output;
+ }
+
+ virtual status_t closeOutput(void *output)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(&output, sizeof(void *));
+ remote()->transact(CLOSE_OUTPUT, data, &reply);
return reply.readInt32();
}
- virtual status_t setParameter(const char* key, const char* value)
+ virtual status_t suspendOutput(void *output)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeCString(key);
- data.writeCString(value);
- remote()->transact(SET_PARAMETER, data, &reply);
+ data.write(&output, sizeof(void *));
+ remote()->transact(SUSPEND_OUTPUT, data, &reply);
return reply.readInt32();
}
-
- virtual void registerClient(const sp<IAudioFlingerClient>& client)
+
+ virtual status_t restoreOutput(void *output)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeStrongBinder(client->asBinder());
- remote()->transact(REGISTER_CLIENT, data, &reply);
+ data.write(&output, sizeof(void *));
+ remote()->transact(RESTORE_OUTPUT, data, &reply);
+ return reply.readInt32();
}
-
- virtual size_t getInputBufferSize(uint32_t sampleRate, int format, int channelCount)
+
+ virtual void *openInput(uint32_t *pDevices,
+ uint32_t *pSamplingRate,
+ uint32_t *pFormat,
+ uint32_t *pChannels,
+ uint32_t acoustics)
{
Parcel data, reply;
+ uint32_t devices = pDevices ? *pDevices : 0;
+ uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0;
+ uint32_t format = pFormat ? *pFormat : 0;
+ uint32_t channels = pChannels ? *pChannels : 0;
+
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(sampleRate);
+ data.writeInt32(devices);
+ data.writeInt32(samplingRate);
data.writeInt32(format);
- data.writeInt32(channelCount);
- remote()->transact(GET_INPUTBUFFERSIZE, data, &reply);
- return reply.readInt32();
+ data.writeInt32(channels);
+ data.writeInt32(acoustics);
+ remote()->transact(OPEN_INPUT, data, &reply);
+ void *input;
+ reply.read(&input, sizeof(void *));
+ devices = reply.readInt32();
+ if (pDevices) *pDevices = devices;
+ samplingRate = reply.readInt32();
+ if (pSamplingRate) *pSamplingRate = samplingRate;
+ format = reply.readInt32();
+ if (pFormat) *pFormat = format;
+ channels = reply.readInt32();
+ if (pChannels) *pChannels = channels;
+ return input;
}
-
- virtual void wakeUp()
+
+ virtual status_t closeInput(void *input)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(WAKE_UP, data, &reply, IBinder::FLAG_ONEWAY);
- return;
+ data.write(&input, sizeof(void *));
+ remote()->transact(CLOSE_INPUT, data, &reply);
+ return reply.readInt32();
}
- virtual bool isA2dpEnabled() const
+ virtual status_t setStreamOutput(uint32_t stream, void *output)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(IS_A2DP_ENABLED, data, &reply);
- return (bool)reply.readInt32();
+ data.writeInt32(stream);
+ data.write(&output, sizeof(void *));
+ remote()->transact(SET_STREAM_OUTPUT, data, &reply);
+ return reply.readInt32();
}
};
@@ -353,12 +456,6 @@ IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnAudioFlinger::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
@@ -373,10 +470,12 @@ status_t BnAudioFlinger::onTransact(
size_t bufferCount = data.readInt32();
uint32_t flags = data.readInt32();
sp<IMemory> buffer = interface_cast<IMemory>(data.readStrongBinder());
+ void *output;
+ data.read(&output, sizeof(void *));
status_t status;
sp<IAudioTrack> track = createTrack(pid,
streamType, sampleRate, format,
- channelCount, bufferCount, flags, buffer, &status);
+ channelCount, bufferCount, flags, buffer, output, &status);
reply->writeInt32(status);
reply->writeStrongBinder(track->asBinder());
return NO_ERROR;
@@ -384,14 +483,15 @@ status_t BnAudioFlinger::onTransact(
case OPEN_RECORD: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
pid_t pid = data.readInt32();
- int inputSource = data.readInt32();
+ void *input;
+ data.read(&input, sizeof(void *));
uint32_t sampleRate = data.readInt32();
int format = data.readInt32();
int channelCount = data.readInt32();
size_t bufferCount = data.readInt32();
uint32_t flags = data.readInt32();
status_t status;
- sp<IAudioRecord> record = openRecord(pid, inputSource,
+ sp<IAudioRecord> record = openRecord(pid, input,
sampleRate, format, channelCount, bufferCount, flags, &status);
reply->writeInt32(status);
reply->writeStrongBinder(record->asBinder());
@@ -399,31 +499,36 @@ status_t BnAudioFlinger::onTransact(
} break;
case SAMPLE_RATE: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- int output = data.readInt32();
+ void *output;
+ data.read(&output, sizeof(void *));
reply->writeInt32( sampleRate(output) );
return NO_ERROR;
} break;
case CHANNEL_COUNT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- int output = data.readInt32();
+ void *output;
+ data.read(&output, sizeof(void *));
reply->writeInt32( channelCount(output) );
return NO_ERROR;
} break;
case FORMAT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- int output = data.readInt32();
+ void *output;
+ data.read(&output, sizeof(void *));
reply->writeInt32( format(output) );
return NO_ERROR;
} break;
case FRAME_COUNT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- int output = data.readInt32();
+ void *output;
+ data.read(&output, sizeof(void *));
reply->writeInt32( frameCount(output) );
return NO_ERROR;
} break;
case LATENCY: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- int output = data.readInt32();
+ void *output;
+ data.read(&output, sizeof(void *));
reply->writeInt32( latency(output) );
return NO_ERROR;
} break;
@@ -450,7 +555,10 @@ status_t BnAudioFlinger::onTransact(
case SET_STREAM_VOLUME: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int stream = data.readInt32();
- reply->writeInt32( setStreamVolume(stream, data.readFloat()) );
+ float volume = data.readFloat();
+ void *output;
+ data.read(&output, sizeof(void *));
+ reply->writeInt32( setStreamVolume(stream, volume, output) );
return NO_ERROR;
} break;
case SET_STREAM_MUTE: {
@@ -462,7 +570,9 @@ status_t BnAudioFlinger::onTransact(
case STREAM_VOLUME: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int stream = data.readInt32();
- reply->writeFloat( streamVolume(stream) );
+ void *output;
+ data.read(&output, sizeof(void *));
+ reply->writeFloat( streamVolume(stream, output) );
return NO_ERROR;
} break;
case STREAM_MUTE: {
@@ -471,31 +581,12 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32( streamMute(stream) );
return NO_ERROR;
} break;
- case SET_ROUTING: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int mode = data.readInt32();
- uint32_t routes = data.readInt32();
- uint32_t mask = data.readInt32();
- reply->writeInt32( setRouting(mode, routes, mask) );
- return NO_ERROR;
- } break;
- case GET_ROUTING: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int mode = data.readInt32();
- reply->writeInt32( getRouting(mode) );
- return NO_ERROR;
- } break;
case SET_MODE: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int mode = data.readInt32();
reply->writeInt32( setMode(mode) );
return NO_ERROR;
} break;
- case GET_MODE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( getMode() );
- return NO_ERROR;
- } break;
case SET_MIC_MUTE: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int state = data.readInt32();
@@ -512,13 +603,23 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32( isMusicActive() );
return NO_ERROR;
} break;
- case SET_PARAMETER: {
+ case SET_PARAMETERS: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- const char *key = data.readCString();
- const char *value = data.readCString();
- reply->writeInt32( setParameter(key, value) );
+ void *ioHandle;
+ data.read(&ioHandle, sizeof(void *));
+ String8 keyValuePairs(data.readString8());
+ reply->writeInt32(setParameters(ioHandle, keyValuePairs));
return NO_ERROR;
- } break;
+ } break;
+ case GET_PARAMETERS: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ void *ioHandle;
+ data.read(&ioHandle, sizeof(void *));
+ String8 keys(data.readString8());
+ reply->writeString8(getParameters(ioHandle, keys));
+ return NO_ERROR;
+ } break;
+
case REGISTER_CLIENT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(data.readStrongBinder());
@@ -533,14 +634,93 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32( getInputBufferSize(sampleRate, format, channelCount) );
return NO_ERROR;
} break;
- case WAKE_UP: {
+ case OPEN_OUTPUT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ uint32_t devices = data.readInt32();
+ uint32_t samplingRate = data.readInt32();
+ uint32_t format = data.readInt32();
+ uint32_t channels = data.readInt32();
+ uint32_t latency = data.readInt32();
+ uint32_t flags = data.readInt32();
+ void *output = openOutput(&devices,
+ &samplingRate,
+ &format,
+ &channels,
+ &latency,
+ flags);
+ LOGV("OPEN_OUTPUT output, %p", output);
+ reply->write(&output, sizeof(void *));
+ reply->writeInt32(devices);
+ reply->writeInt32(samplingRate);
+ reply->writeInt32(format);
+ reply->writeInt32(channels);
+ reply->writeInt32(latency);
+ return NO_ERROR;
+ } break;
+ case OPEN_DUPLICATE_OUTPUT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ void *output1;
+ void *output2;
+ data.read(&output1, sizeof(void *));
+ data.read(&output2, sizeof(void *));
+ void *output = openDuplicateOutput(output1, output2);
+ reply->write(&output, sizeof(void *));
+ return NO_ERROR;
+ } break;
+ case CLOSE_OUTPUT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ void *output;
+ data.read(&output, sizeof(void *));
+ reply->writeInt32(closeOutput(output));
+ return NO_ERROR;
+ } break;
+ case SUSPEND_OUTPUT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ void *output;
+ data.read(&output, sizeof(void *));
+ reply->writeInt32(suspendOutput(output));
+ return NO_ERROR;
+ } break;
+ case RESTORE_OUTPUT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ void *output;
+ data.read(&output, sizeof(void *));
+ reply->writeInt32(restoreOutput(output));
+ return NO_ERROR;
+ } break;
+ case OPEN_INPUT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ uint32_t devices = data.readInt32();
+ uint32_t samplingRate = data.readInt32();
+ uint32_t format = data.readInt32();
+ uint32_t channels = data.readInt32();
+ uint32_t acoustics = data.readInt32();
+
+ void *input = openInput(&devices,
+ &samplingRate,
+ &format,
+ &channels,
+ acoustics);
+ reply->write(&input, sizeof(void *));
+ reply->writeInt32(devices);
+ reply->writeInt32(samplingRate);
+ reply->writeInt32(format);
+ reply->writeInt32(channels);
+ return NO_ERROR;
+ } break;
+ case CLOSE_INPUT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- wakeUp();
+ void *input;
+ data.read(&input, sizeof(void *));
+ reply->writeInt32(closeInput(input));
return NO_ERROR;
} break;
- case IS_A2DP_ENABLED: {
+ case SET_STREAM_OUTPUT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( (int)isA2dpEnabled() );
+ void *output;
+ uint32_t stream = data.readInt32();
+ data.read(&output, sizeof(void *));
+ reply->writeInt32(setStreamOutput(stream, output));
return NO_ERROR;
} break;
default:
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index 9d00aef..eaae977 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -20,14 +20,15 @@
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <media/IAudioFlingerClient.h>
+#include <media/AudioSystem.h>
namespace android {
enum {
- AUDIO_OUTPUT_CHANGED = IBinder::FIRST_CALL_TRANSACTION
+ IO_CONFIG_CHANGED = IBinder::FIRST_CALL_TRANSACTION
};
class BpAudioFlingerClient : public BpInterface<IAudioFlingerClient>
@@ -38,12 +39,25 @@ public:
{
}
- void a2dpEnabledChanged(bool enabled)
+ void ioConfigChanged(int event, void *param1, void *param2)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());
- data.writeInt32((int)enabled);
- remote()->transact(AUDIO_OUTPUT_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
+ data.writeInt32(event);
+ data.write(&param1, sizeof(void *));
+ if (event == AudioSystem::STREAM_CONFIG_CHANGED) {
+ uint32_t stream = *(uint32_t *)param2;
+ LOGV("ioConfigChanged stream %d", stream);
+ data.writeInt32(stream);
+ } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) {
+ AudioSystem::OutputDescriptor *desc = (AudioSystem::OutputDescriptor *)param2;
+ data.writeInt32(desc->samplingRate);
+ data.writeInt32(desc->format);
+ data.writeInt32(desc->channels);
+ data.writeInt32(desc->frameCount);
+ data.writeInt32(desc->latency);
+ }
+ remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
}
};
@@ -51,20 +65,31 @@ IMPLEMENT_META_INTERFACE(AudioFlingerClient, "android.media.IAudioFlingerClient"
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnAudioFlingerClient::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch(code) {
- case AUDIO_OUTPUT_CHANGED: {
+ case IO_CONFIG_CHANGED: {
CHECK_INTERFACE(IAudioFlingerClient, data, reply);
- bool enabled = (bool)data.readInt32();
- a2dpEnabledChanged(enabled);
+ int event = data.readInt32();
+ void *param1;
+ void *param2 = 0;
+ AudioSystem::OutputDescriptor desc;
+ uint32_t stream;
+ data.read(&param1, sizeof(void *));
+ if (event == AudioSystem::STREAM_CONFIG_CHANGED) {
+ stream = data.readInt32();
+ param2 = &stream;
+ LOGV("STREAM_CONFIG_CHANGED stream %d", stream);
+ } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) {
+ desc.samplingRate = data.readInt32();
+ desc.format = data.readInt32();
+ desc.channels = data.readInt32();
+ desc.frameCount = data.readInt32();
+ desc.latency = data.readInt32();
+ param2 = &desc;
+ }
+ ioConfigChanged(event, param1, param2);
return NO_ERROR;
} break;
default:
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
new file mode 100644
index 0000000..0d8a329
--- /dev/null
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -0,0 +1,423 @@
+/*
+**
+** Copyright 2009, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "IAudioPolicyService"
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <binder/Parcel.h>
+
+#include <media/IAudioPolicyService.h>
+
+namespace android {
+
+enum {
+ SET_DEVICE_CONNECTION_STATE = IBinder::FIRST_CALL_TRANSACTION,
+ GET_DEVICE_CONNECTION_STATE,
+ SET_PHONE_STATE,
+ SET_RINGER_MODE,
+ SET_FORCE_USE,
+ GET_FORCE_USE,
+ GET_OUTPUT,
+ START_OUTPUT,
+ STOP_OUTPUT,
+ RELEASE_OUTPUT,
+ GET_INPUT,
+ START_INPUT,
+ STOP_INPUT,
+ RELEASE_INPUT,
+ INIT_STREAM_VOLUME,
+ SET_STREAM_VOLUME,
+ GET_STREAM_VOLUME
+};
+
+class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
+{
+public:
+ BpAudioPolicyService(const sp<IBinder>& impl)
+ : BpInterface<IAudioPolicyService>(impl)
+ {
+ }
+
+ virtual status_t setDeviceConnectionState(
+ AudioSystem::audio_devices device,
+ AudioSystem::device_connection_state state,
+ const char *device_address)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(device));
+ data.writeInt32(static_cast <uint32_t>(state));
+ data.writeCString(device_address);
+ remote()->transact(SET_DEVICE_CONNECTION_STATE, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual AudioSystem::device_connection_state getDeviceConnectionState(
+ AudioSystem::audio_devices device,
+ const char *device_address)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(device));
+ data.writeCString(device_address);
+ remote()->transact(GET_DEVICE_CONNECTION_STATE, data, &reply);
+ return static_cast <AudioSystem::device_connection_state>(reply.readInt32());
+ }
+
+ virtual status_t setPhoneState(int state)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(state);
+ remote()->transact(SET_PHONE_STATE, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t setRingerMode(uint32_t mode, uint32_t mask)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(mode);
+ data.writeInt32(mask);
+ remote()->transact(SET_RINGER_MODE, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t setForceUse(AudioSystem::force_use usage, AudioSystem::forced_config config)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(usage));
+ data.writeInt32(static_cast <uint32_t>(config));
+ remote()->transact(SET_FORCE_USE, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual AudioSystem::forced_config getForceUse(AudioSystem::force_use usage)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(usage));
+ remote()->transact(GET_FORCE_USE, data, &reply);
+ return static_cast <AudioSystem::forced_config> (reply.readInt32());
+ }
+
+ virtual audio_io_handle_t getOutput(
+ AudioSystem::stream_type stream,
+ uint32_t samplingRate,
+ uint32_t format,
+ uint32_t channels,
+ AudioSystem::output_flags flags)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(stream));
+ data.writeInt32(samplingRate);
+ data.writeInt32(static_cast <uint32_t>(format));
+ data.writeInt32(channels);
+ data.writeInt32(static_cast <uint32_t>(flags));
+ remote()->transact(GET_OUTPUT, data, &reply);
+ audio_io_handle_t output;
+ reply.read(&output, sizeof(audio_io_handle_t));
+ return output;
+ }
+
+ virtual status_t startOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&output, sizeof(audio_io_handle_t));
+ data.writeInt32(stream);
+ remote()->transact(START_OUTPUT, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&output, sizeof(audio_io_handle_t));
+ data.writeInt32(stream);
+ remote()->transact(STOP_OUTPUT, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual void releaseOutput(audio_io_handle_t output)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&output, sizeof(audio_io_handle_t));
+ remote()->transact(RELEASE_OUTPUT, data, &reply);
+ }
+
+ virtual audio_io_handle_t getInput(
+ int inputSource,
+ uint32_t samplingRate,
+ uint32_t format,
+ uint32_t channels,
+ AudioSystem::audio_in_acoustics acoustics)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(inputSource);
+ data.writeInt32(samplingRate);
+ data.writeInt32(static_cast <uint32_t>(format));
+ data.writeInt32(channels);
+ data.writeInt32(static_cast <uint32_t>(acoustics));
+ remote()->transact(GET_INPUT, data, &reply);
+ audio_io_handle_t input;
+ reply.read(&input, sizeof(audio_io_handle_t));
+ return input;
+ }
+
+ virtual status_t startInput(audio_io_handle_t input)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&input, sizeof(audio_io_handle_t));
+ remote()->transact(START_INPUT, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t stopInput(audio_io_handle_t input)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&input, sizeof(audio_io_handle_t));
+ remote()->transact(STOP_INPUT, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual void releaseInput(audio_io_handle_t input)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&input, sizeof(audio_io_handle_t));
+ remote()->transact(RELEASE_INPUT, data, &reply);
+ }
+
+ virtual status_t initStreamVolume(AudioSystem::stream_type stream,
+ int indexMin,
+ int indexMax)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(stream));
+ data.writeInt32(indexMin);
+ data.writeInt32(indexMax);
+ remote()->transact(INIT_STREAM_VOLUME, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t setStreamVolumeIndex(AudioSystem::stream_type stream, int index)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(stream));
+ data.writeInt32(index);
+ remote()->transact(SET_STREAM_VOLUME, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t getStreamVolumeIndex(AudioSystem::stream_type stream, int *index)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(stream));
+ remote()->transact(GET_STREAM_VOLUME, data, &reply);
+ int lIndex = reply.readInt32();
+ if (index) *index = lIndex;
+ return static_cast <status_t> (reply.readInt32());
+ }
+};
+
+IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
+
+// ----------------------------------------------------------------------
+
+
+status_t BnAudioPolicyService::onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+{
+ switch(code) {
+ case SET_DEVICE_CONNECTION_STATE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::audio_devices device = static_cast <AudioSystem::audio_devices>(data.readInt32());
+ AudioSystem::device_connection_state state = static_cast <AudioSystem::device_connection_state>(data.readInt32());
+ const char *device_address = data.readCString();
+ reply->writeInt32(static_cast <uint32_t>(setDeviceConnectionState(device, state, device_address)));
+ return NO_ERROR;
+ } break;
+
+ case GET_DEVICE_CONNECTION_STATE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::audio_devices device = static_cast <AudioSystem::audio_devices>(data.readInt32());
+ const char *device_address = data.readCString();
+ reply->writeInt32(static_cast <uint32_t>(getDeviceConnectionState(device, device_address)));
+ return NO_ERROR;
+ } break;
+
+ case SET_PHONE_STATE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ reply->writeInt32(static_cast <uint32_t>(setPhoneState(data.readInt32())));
+ return NO_ERROR;
+ } break;
+
+ case SET_RINGER_MODE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ uint32_t mode = data.readInt32();
+ uint32_t mask = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(setRingerMode(mode, mask)));
+ return NO_ERROR;
+ } break;
+
+ case SET_FORCE_USE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::force_use usage = static_cast <AudioSystem::force_use>(data.readInt32());
+ AudioSystem::forced_config config = static_cast <AudioSystem::forced_config>(data.readInt32());
+ reply->writeInt32(static_cast <uint32_t>(setForceUse(usage, config)));
+ return NO_ERROR;
+ } break;
+
+ case GET_FORCE_USE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::force_use usage = static_cast <AudioSystem::force_use>(data.readInt32());
+ reply->writeInt32(static_cast <uint32_t>(getForceUse(usage)));
+ return NO_ERROR;
+ } break;
+
+ case GET_OUTPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ uint32_t samplingRate = data.readInt32();
+ uint32_t format = data.readInt32();
+ uint32_t channels = data.readInt32();
+ AudioSystem::output_flags flags = static_cast <AudioSystem::output_flags>(data.readInt32());
+
+ audio_io_handle_t output = getOutput(stream,
+ samplingRate,
+ format,
+ channels,
+ flags);
+ reply->write(&output, sizeof(audio_io_handle_t));
+ return NO_ERROR;
+ } break;
+
+ case START_OUTPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_io_handle_t output;
+ data.read(&output, sizeof(audio_io_handle_t));
+ uint32_t stream = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(startOutput(output, (AudioSystem::stream_type)stream)));
+ return NO_ERROR;
+ } break;
+
+ case STOP_OUTPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_io_handle_t output;
+ data.read(&output, sizeof(audio_io_handle_t));
+ uint32_t stream = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(stopOutput(output, (AudioSystem::stream_type)stream)));
+ return NO_ERROR;
+ } break;
+
+ case RELEASE_OUTPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_io_handle_t output;
+ data.read(&output, sizeof(audio_io_handle_t));
+ releaseOutput(output);
+ return NO_ERROR;
+ } break;
+
+ case GET_INPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ int inputSource = data.readInt32();
+ uint32_t samplingRate = data.readInt32();
+ uint32_t format = data.readInt32();
+ uint32_t channels = data.readInt32();
+ AudioSystem::audio_in_acoustics acoustics = static_cast <AudioSystem::audio_in_acoustics>(data.readInt32());
+ audio_io_handle_t input = getInput(inputSource,
+ samplingRate,
+ format,
+ channels,
+ acoustics);
+ reply->write(&input, sizeof(audio_io_handle_t));
+ return NO_ERROR;
+ } break;
+
+ case START_INPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_io_handle_t input;
+ data.read(&input, sizeof(audio_io_handle_t));
+ reply->writeInt32(static_cast <uint32_t>(startInput(input)));
+ return NO_ERROR;
+ } break;
+
+ case STOP_INPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_io_handle_t input;
+ data.read(&input, sizeof(audio_io_handle_t));
+ reply->writeInt32(static_cast <uint32_t>(stopInput(input)));
+ return NO_ERROR;
+ } break;
+
+ case RELEASE_INPUT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_io_handle_t input;
+ data.read(&input, sizeof(audio_io_handle_t));
+ releaseInput(input);
+ return NO_ERROR;
+ } break;
+
+ case INIT_STREAM_VOLUME: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ int indexMin = data.readInt32();
+ int indexMax = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(initStreamVolume(stream, indexMin,indexMax)));
+ return NO_ERROR;
+ } break;
+
+ case SET_STREAM_VOLUME: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ int index = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(setStreamVolumeIndex(stream, index)));
+ return NO_ERROR;
+ } break;
+
+ case GET_STREAM_VOLUME: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ int index;
+ status_t status = getStreamVolumeIndex(stream, &index);
+ reply->writeInt32(index);
+ reply->writeInt32(static_cast <uint32_t>(status));
+ return NO_ERROR;
+ } break;
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp
index 6e42dac..8fb5d3d 100644
--- a/media/libmedia/IAudioRecord.cpp
+++ b/media/libmedia/IAudioRecord.cpp
@@ -18,7 +18,7 @@
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <media/IAudioRecord.h>
@@ -66,12 +66,6 @@ IMPLEMENT_META_INTERFACE(AudioRecord, "android.media.IAudioRecord");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnAudioRecord::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index abc202d..75b861b 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -18,7 +18,7 @@
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <media/IAudioTrack.h>
@@ -91,12 +91,6 @@ IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnAudioTrack::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 85b5944..397a55b 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -17,7 +17,7 @@
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <SkBitmap.h>
#include <media/IMediaMetadataRetriever.h>
@@ -126,16 +126,10 @@ public:
}
};
-IMPLEMENT_META_INTERFACE(MediaMetadataRetriever, "android.hardware.IMediaMetadataRetriever");
+IMPLEMENT_META_INTERFACE(MediaMetadataRetriever, "android.media.IMediaMetadataRetriever");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnMediaMetadataRetriever::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
@@ -215,4 +209,3 @@ status_t BnMediaMetadataRetriever::onTransact(
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index f18765a..5d9db10 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -18,7 +18,7 @@
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <media/IMediaPlayer.h>
#include <ui/ISurface.h>
@@ -39,7 +39,10 @@ enum {
RESET,
SET_AUDIO_STREAM_TYPE,
SET_LOOPING,
- SET_VOLUME
+ SET_VOLUME,
+ INVOKE,
+ SET_METADATA_FILTER,
+ GET_METADATA,
};
class BpMediaPlayer: public BpInterface<IMediaPlayer>
@@ -170,18 +173,38 @@ public:
remote()->transact(SET_VOLUME, data, &reply);
return reply.readInt32();
}
+
+ status_t invoke(const Parcel& request, Parcel *reply)
+ { // Avoid doing any extra copy. The interface descriptor should
+ // have been set by MediaPlayer.java.
+ return remote()->transact(INVOKE, request, reply);
+ }
+
+ status_t setMetadataFilter(const Parcel& request)
+ {
+ Parcel reply;
+ // Avoid doing any extra copy of the request. The interface
+ // descriptor should have been set by MediaPlayer.java.
+ remote()->transact(SET_METADATA_FILTER, request, &reply);
+ return reply.readInt32();
+ }
+
+ status_t getMetadata(bool update_only, bool apply_filter, Parcel *reply)
+ {
+ Parcel request;
+ request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ // TODO: Burning 2 ints for 2 booleans. Should probably use flags in an int here.
+ request.writeInt32(update_only);
+ request.writeInt32(apply_filter);
+ remote()->transact(GET_METADATA, request, reply);
+ return reply->readInt32();
+ }
};
-IMPLEMENT_META_INTERFACE(MediaPlayer, "android.hardware.IMediaPlayer");
+IMPLEMENT_META_INTERFACE(MediaPlayer, "android.media.IMediaPlayer");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnMediaPlayer::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
@@ -266,6 +289,24 @@ status_t BnMediaPlayer::onTransact(
reply->writeInt32(setVolume(data.readFloat(), data.readFloat()));
return NO_ERROR;
} break;
+ case INVOKE: {
+ CHECK_INTERFACE(IMediaPlayer, data, reply);
+ invoke(data, reply);
+ return NO_ERROR;
+ } break;
+ case SET_METADATA_FILTER: {
+ CHECK_INTERFACE(IMediaPlayer, data, reply);
+ reply->writeInt32(setMetadataFilter(data));
+ return NO_ERROR;
+ } break;
+ case GET_METADATA: {
+ CHECK_INTERFACE(IMediaPlayer, data, reply);
+ const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply);
+ reply->setDataPosition(0);
+ reply->writeInt32(retcode);
+ reply->setDataPosition(0);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
@@ -274,4 +315,3 @@ status_t BnMediaPlayer::onTransact(
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/media/libmedia/IMediaPlayerClient.cpp b/media/libmedia/IMediaPlayerClient.cpp
index 65022cd..bf51829 100644
--- a/media/libmedia/IMediaPlayerClient.cpp
+++ b/media/libmedia/IMediaPlayerClient.cpp
@@ -16,8 +16,8 @@
*/
#include <utils/RefBase.h>
-#include <utils/IInterface.h>
-#include <utils/Parcel.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
#include <media/IMediaPlayerClient.h>
@@ -46,16 +46,10 @@ public:
}
};
-IMPLEMENT_META_INTERFACE(MediaPlayerClient, "android.hardware.IMediaPlayerClient");
+IMPLEMENT_META_INTERFACE(MediaPlayerClient, "android.media.IMediaPlayerClient");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnMediaPlayerClient::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
@@ -74,4 +68,3 @@ status_t BnMediaPlayerClient::onTransact(
}
}; // namespace android
-
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 01cdb6c..8d2c360 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -17,11 +17,14 @@
#include <stdint.h>
#include <sys/types.h>
-#include <utils/Parcel.h>
-#include <utils/IMemory.h>
+#include <binder/Parcel.h>
+#include <binder/IMemory.h>
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
+#include <media/IOMX.h>
+
+#include <utils/Errors.h> // for status_t
namespace android {
@@ -32,6 +35,7 @@ enum {
DECODE_FD,
CREATE_MEDIA_RECORDER,
CREATE_METADATA_RETRIEVER,
+ CREATE_OMX,
};
class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -109,18 +113,19 @@ public:
*pFormat = reply.readInt32();
return interface_cast<IMemory>(reply.readStrongBinder());
}
+
+ virtual sp<IOMX> createOMX() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+ remote()->transact(CREATE_OMX, data, &reply);
+ return interface_cast<IOMX>(reply.readStrongBinder());
+ }
};
-IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.hardware.IMediaPlayerService");
+IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnMediaPlayerService::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
@@ -187,6 +192,12 @@ status_t BnMediaPlayerService::onTransact(
reply->writeStrongBinder(retriever->asBinder());
return NO_ERROR;
} break;
+ case CREATE_OMX: {
+ CHECK_INTERFACE(IMediaPlayerService, data, reply);
+ sp<IOMX> omx = createOMX();
+ reply->writeStrongBinder(omx->asBinder());
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 84d08c4..df7d301 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -18,7 +18,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "IMediaRecorder"
#include <utils/Log.h>
-#include <utils/Parcel.h>
+#include <binder/Parcel.h>
#include <ui/ISurface.h>
#include <ui/ICamera.h>
#include <media/IMediaPlayerClient.h>
@@ -264,16 +264,10 @@ public:
}
};
-IMPLEMENT_META_INTERFACE(MediaRecorder, "android.hardware.IMediaRecorder");
+IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
// ----------------------------------------------------------------------
-#define CHECK_INTERFACE(interface, data, reply) \
- do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
- LOGW("Call incorrectly routed to " #interface); \
- return PERMISSION_DENIED; \
- } } while (0)
-
status_t BnMediaRecorder::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
new file mode 100644
index 0000000..f2a657a
--- /dev/null
+++ b/media/libmedia/IOMX.cpp
@@ -0,0 +1,561 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IOMX"
+#include <utils/Log.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+#include <media/IOMX.h>
+
+namespace android {
+
+enum {
+ CONNECT = IBinder::FIRST_CALL_TRANSACTION,
+ LIST_NODES,
+ ALLOCATE_NODE,
+ FREE_NODE,
+ SEND_COMMAND,
+ GET_PARAMETER,
+ SET_PARAMETER,
+ USE_BUFFER,
+ ALLOC_BUFFER,
+ ALLOC_BUFFER_WITH_BACKUP,
+ FREE_BUFFER,
+ OBSERVE_NODE,
+ FILL_BUFFER,
+ EMPTY_BUFFER,
+ OBSERVER_ON_MSG,
+};
+
+static void *readVoidStar(const Parcel *parcel) { // decode an opaque handle (node_id/buffer_id) marshalled by writeVoidStar
+    // FIX if sizeof(void *) != sizeof(int32) -- NOTE(review): truncates pointers on 64-bit targets; confirm 32-bit-only build
+    return (void *)parcel->readInt32();
+}
+
+static void writeVoidStar(void *x, Parcel *parcel) { // encode an opaque handle as int32 (inverse of readVoidStar)
+    // FIX if sizeof(void *) != sizeof(int32)
+    parcel->writeInt32((int32_t)x);
+}
+
+class BpOMX : public BpInterface<IOMX> { // client-side proxy: marshals IOMX calls into binder transactions to the media server
+public:
+    BpOMX(const sp<IBinder> &impl)
+        : BpInterface<IOMX>(impl) {
+    }
+
+#if IOMX_USES_SOCKETS
+    virtual status_t connect(int *sd) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        remote()->transact(CONNECT, data, &reply);
+
+        status_t err = reply.readInt32(); // status written first by BnOMX
+        if (err == OK) {
+            *sd = dup(reply.readFileDescriptor()); // dup so the caller owns an fd independent of the Parcel's copy
+        } else {
+            *sd = -1;
+        }
+
+        return reply.readInt32(); // NOTE(review): BnOMX writes only one status int32; this second read looks wrong -- confirm, likely should be 'return err;'
+    }
+#endif
+
+    virtual status_t list_nodes(List<String8> *list) { // fetch names of all available OMX components
+        list->clear();
+
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        remote()->transact(LIST_NODES, data, &reply);
+
+        int32_t n = reply.readInt32(); // count, followed by n String8 entries
+        for (int32_t i = 0; i < n; ++i) {
+            String8 s = reply.readString8();
+
+            list->push_back(s);
+        }
+
+        return OK;
+    }
+
+    virtual status_t allocate_node(const char *name, node_id *node) { // instantiate component 'name'; *node is 0 on failure
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeCString(name);
+        remote()->transact(ALLOCATE_NODE, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err == OK) {
+            *node = readVoidStar(&reply); // opaque handle, only meaningful to this IOMX instance
+        } else {
+            *node = 0;
+        }
+
+        return err;
+    }
+
+    virtual status_t free_node(node_id node) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        remote()->transact(FREE_NODE, data, &reply);
+
+        return reply.readInt32();
+    }
+
+    virtual status_t send_command(
+            node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) { // forward an OMX command (e.g. state change) to the node
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(cmd);
+        data.writeInt32(param);
+        remote()->transact(SEND_COMMAND, data, &reply);
+
+        return reply.readInt32();
+    }
+
+    virtual status_t get_parameter(
+            node_id node, OMX_INDEXTYPE index,
+            void *params, size_t size) { // params is sent in AND read back: OMX get-parameter structs carry input fields
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(index);
+        data.writeInt32(size);
+        data.write(params, size);
+        remote()->transact(GET_PARAMETER, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err != OK) {
+            return err; // on failure the reply carries no payload; params is untouched
+        }
+
+        reply.read(params, size); // copy the filled-in struct back to the caller
+
+        return OK;
+    }
+
+    virtual status_t set_parameter(
+            node_id node, OMX_INDEXTYPE index,
+            const void *params, size_t size) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(index);
+        data.writeInt32(size);
+        data.write(params, size);
+        remote()->transact(SET_PARAMETER, data, &reply);
+
+        return reply.readInt32();
+    }
+
+    virtual status_t use_buffer(
+            node_id node, OMX_U32 port_index, const sp<IMemory> &params,
+            buffer_id *buffer) { // wrap caller-supplied shared memory as an OMX buffer; *buffer is 0 on failure
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(port_index);
+        data.writeStrongBinder(params->asBinder()); // share the IMemory region across processes
+        remote()->transact(USE_BUFFER, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err != OK) {
+            *buffer = 0;
+
+            return err;
+        }
+
+        *buffer = readVoidStar(&reply);
+
+        return err;
+    }
+
+    virtual status_t allocate_buffer(
+            node_id node, OMX_U32 port_index, size_t size,
+            buffer_id *buffer) { // let the component allocate its own buffer of 'size' bytes
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(port_index);
+        data.writeInt32(size);
+        remote()->transact(ALLOC_BUFFER, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err != OK) {
+            *buffer = 0;
+
+            return err;
+        }
+
+        *buffer = readVoidStar(&reply);
+
+        return err;
+    }
+
+    virtual status_t allocate_buffer_with_backup(
+            node_id node, OMX_U32 port_index, const sp<IMemory> &params,
+            buffer_id *buffer) { // component-allocated buffer mirrored by caller-visible backup memory
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(port_index);
+        data.writeStrongBinder(params->asBinder());
+        remote()->transact(ALLOC_BUFFER_WITH_BACKUP, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err != OK) {
+            *buffer = 0;
+
+            return err;
+        }
+
+        *buffer = readVoidStar(&reply);
+
+        return err;
+    }
+
+    virtual status_t free_buffer(
+            node_id node, OMX_U32 port_index, buffer_id buffer) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeInt32(port_index);
+        writeVoidStar(buffer, &data);
+        remote()->transact(FREE_BUFFER, data, &reply);
+
+        return reply.readInt32();
+    }
+
+#if !IOMX_USES_SOCKETS
+    virtual status_t observe_node(
+            node_id node, const sp<IOMXObserver> &observer) { // register the callback target for this node's messages
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        data.writeStrongBinder(observer->asBinder());
+        remote()->transact(OBSERVE_NODE, data, &reply);
+
+        return reply.readInt32();
+    }
+
+    virtual void fill_buffer(node_id node, buffer_id buffer) { // oneway: fire-and-forget, no status returned
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        writeVoidStar(buffer, &data);
+        remote()->transact(FILL_BUFFER, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+
+    virtual void empty_buffer(
+            node_id node,
+            buffer_id buffer,
+            OMX_U32 range_offset, OMX_U32 range_length,
+            OMX_U32 flags, OMX_TICKS timestamp) { // oneway: submit input data [offset, offset+length) to the component
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        writeVoidStar(node, &data);
+        writeVoidStar(buffer, &data);
+        data.writeInt32(range_offset);
+        data.writeInt32(range_length);
+        data.writeInt32(flags);
+        data.writeInt64(timestamp); // OMX_TICKS marshalled as 64-bit
+        remote()->transact(EMPTY_BUFFER, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+#endif
+};
+
+IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
+
+////////////////////////////////////////////////////////////////////////////////
+
+#define CHECK_INTERFACE(interface, data, reply) \
+    do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \
+        LOGW("Call incorrectly routed to " #interface); \
+        return PERMISSION_DENIED; \
+    } } while (0) // binder guard: rejects transactions whose interface token doesn't match this service
+
+status_t BnOMX::onTransact( // server-side dispatcher: unmarshals each IOMX transaction and calls the concrete impl
+    uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+#if IOMX_USES_SOCKETS
+        case CONNECT:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            int s;
+            status_t err = connect(&s);
+
+            reply->writeInt32(err);
+            if (err == OK) {
+                assert(s >= 0);
+                reply->writeDupFileDescriptor(s); // Parcel keeps its own dup, so our copy can be closed immediately
+                close(s);
+                s = -1;
+            } else {
+                assert(s == -1);
+            }
+
+            return NO_ERROR;
+        }
+#endif
+
+        case LIST_NODES:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            List<String8> list;
+            list_nodes(&list);
+
+            reply->writeInt32(list.size()); // count first, then each name (mirrors BpOMX::list_nodes)
+            for (List<String8>::iterator it = list.begin();
+                 it != list.end(); ++it) {
+                reply->writeString8(*it);
+            }
+
+            return NO_ERROR;
+        }
+
+        case ALLOCATE_NODE:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node;
+            status_t err = allocate_node(data.readCString(), &node); // NOTE(review): readCString() can return NULL on a malformed parcel -- confirm allocate_node tolerates it
+            reply->writeInt32(err);
+            if (err == OK) {
+                writeVoidStar(node, reply);
+            }
+
+            return NO_ERROR;
+        }
+
+        case FREE_NODE:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+
+            reply->writeInt32(free_node(node));
+
+            return NO_ERROR;
+        }
+
+        case SEND_COMMAND:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+
+            OMX_COMMANDTYPE cmd =
+                static_cast<OMX_COMMANDTYPE>(data.readInt32());
+
+            OMX_S32 param = data.readInt32();
+            reply->writeInt32(send_command(node, cmd, param));
+
+            return NO_ERROR;
+        }
+
+        case GET_PARAMETER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
+
+            size_t size = data.readInt32(); // NOTE(review): attacker-controlled size from the parcel; no upper bound before malloc -- confirm acceptable
+
+            // XXX I am not happy with this but Parcel::readInplace didn't work.
+            void *params = malloc(size); // NOTE(review): malloc result unchecked; data.read(NULL, ...) on OOM
+            data.read(params, size);
+
+            status_t err = get_parameter(node, index, params, size);
+
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->write(params, size); // echo the filled-in struct back to the proxy
+            }
+
+            free(params);
+            params = NULL;
+
+            return NO_ERROR;
+        }
+
+        case SET_PARAMETER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
+
+            size_t size = data.readInt32();
+            void *params = const_cast<void *>(data.readInplace(size)); // NOTE(review): readInplace returns NULL if the parcel is short; not checked before use
+
+            reply->writeInt32(set_parameter(node, index, params, size));
+
+            return NO_ERROR;
+        }
+
+        case USE_BUFFER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            OMX_U32 port_index = data.readInt32();
+            sp<IMemory> params =
+                interface_cast<IMemory>(data.readStrongBinder()); // shared memory supplied by the client
+
+            buffer_id buffer;
+            status_t err = use_buffer(node, port_index, params, &buffer);
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                writeVoidStar(buffer, reply);
+            }
+
+            return NO_ERROR;
+        }
+
+        case ALLOC_BUFFER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            OMX_U32 port_index = data.readInt32();
+            size_t size = data.readInt32();
+
+            buffer_id buffer;
+            status_t err = allocate_buffer(node, port_index, size, &buffer);
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                writeVoidStar(buffer, reply);
+            }
+
+            return NO_ERROR;
+        }
+
+        case ALLOC_BUFFER_WITH_BACKUP:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            OMX_U32 port_index = data.readInt32();
+            sp<IMemory> params =
+                interface_cast<IMemory>(data.readStrongBinder());
+
+            buffer_id buffer;
+            status_t err = allocate_buffer_with_backup(
+                    node, port_index, params, &buffer);
+
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                writeVoidStar(buffer, reply);
+            }
+
+            return NO_ERROR;
+        }
+
+        case FREE_BUFFER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            OMX_U32 port_index = data.readInt32();
+            buffer_id buffer = readVoidStar(&data);
+            reply->writeInt32(free_buffer(node, port_index, buffer));
+
+            return NO_ERROR;
+        }
+
+#if !IOMX_USES_SOCKETS
+        case OBSERVE_NODE:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            sp<IOMXObserver> observer =
+                interface_cast<IOMXObserver>(data.readStrongBinder());
+            reply->writeInt32(observe_node(node, observer));
+
+            return NO_ERROR;
+        }
+
+        case FILL_BUFFER: // oneway from the proxy: no status is written back
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            buffer_id buffer = readVoidStar(&data);
+            fill_buffer(node, buffer);
+
+            return NO_ERROR;
+        }
+
+        case EMPTY_BUFFER: // oneway from the proxy: no status is written back
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = readVoidStar(&data);
+            buffer_id buffer = readVoidStar(&data);
+            OMX_U32 range_offset = data.readInt32();
+            OMX_U32 range_length = data.readInt32();
+            OMX_U32 flags = data.readInt32();
+            OMX_TICKS timestamp = data.readInt64();
+
+            empty_buffer(
+                    node, buffer, range_offset, range_length,
+                    flags, timestamp);
+
+            return NO_ERROR;
+        }
+#endif
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags); // unknown code: let the base class handle/report it
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+class BpOMXObserver : public BpInterface<IOMXObserver> { // proxy for callbacks from the OMX service back to the client
+public:
+    BpOMXObserver(const sp<IBinder> &impl)
+        : BpInterface<IOMXObserver>(impl) {
+    }
+
+    virtual void on_message(const omx_message &msg) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor());
+        data.write(&msg, sizeof(msg)); // raw byte copy -- NOTE(review): assumes omx_message is POD with no pointers; confirm
+
+        remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY); // oneway: fire-and-forget notification
+    }
+};
+
+IMPLEMENT_META_INTERFACE(OMXObserver, "android.hardware.IOMXObserver");
+
+status_t BnOMXObserver::onTransact( // server side of the observer callback channel
+    uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case OBSERVER_ON_MSG:
+        {
+            CHECK_INTERFACE(IOMXObserver, data, reply);
+
+            omx_message msg;
+            data.read(&msg, sizeof(msg)); // inverse of BpOMXObserver::on_message's raw write
+
+            // XXX Could use readInplace maybe?
+            on_message(msg);
+
+            return NO_ERROR;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+
+} // namespace android
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index 586aacb..ee9e1d8 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -99,7 +99,7 @@ int JetPlayer::init()
mAudioTrack->set(AudioSystem::MUSIC, //TODO parametrize this
pLibConfig->sampleRate,
1, // format = PCM 16bits per sample,
- pLibConfig->numChannels,
+ (pLibConfig->numChannels == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO,
mTrackBufferSize,
0);
diff --git a/media/libmedia/Metadata.cpp b/media/libmedia/Metadata.cpp
new file mode 100644
index 0000000..35ec6b3
--- /dev/null
+++ b/media/libmedia/Metadata.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Metadata"
+#include <utils/Log.h>
+
+#include <sys/types.h>
+#include <media/Metadata.h>
+#include <binder/Parcel.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+// This file contains code to serialize Metadata triples (key, type,
+// value) into a parcel. The Parcel is destinated to be decoded by the
+// Metadata.java class.
+
+namespace {
+// All these constants below must be kept in sync with Metadata.java.
+enum MetadataId {
+ FIRST_SYSTEM_ID = 1,
+ LAST_SYSTEM_ID = 31,
+ FIRST_CUSTOM_ID = 8192
+};
+
+// Types
+enum Types {
+ STRING_VAL = 1,
+ INTEGER_VAL,
+ BOOLEAN_VAL,
+ LONG_VAL,
+ DOUBLE_VAL,
+ TIMED_TEXT_VAL,
+ DATE_VAL,
+ BYTE_ARRAY_VAL,
+};
+
+const size_t kRecordHeaderSize = 3 * sizeof(int32_t);
+const int32_t kMetaMarker = 0x4d455441; // 'M' 'E' 'T' 'A'
+
+} // anonymous namespace
+
+namespace android {
+namespace media {
+
+Metadata::Metadata(Parcel *p)
+    :mData(p),
+     mBegin(p->dataPosition()) { } // remember where this record starts so resetParcel/updateLength can rewind
+
+Metadata::~Metadata() { } // mData is borrowed, not owned; nothing to release
+
+void Metadata::resetParcel()
+{
+    mData->setDataPosition(mBegin); // discard everything appended since construction (used on error paths)
+}
+
+// Update the 4 bytes int at the beginning of the parcel which holds
+// the number of bytes written so far.
+void Metadata::updateLength()
+{
+    const size_t end = mData->dataPosition();
+
+    mData->setDataPosition(mBegin);
+    mData->writeInt32(end - mBegin); // overwrite the -1 placeholder written by appendHeader
+    mData->setDataPosition(end); // restore the write cursor
+}
+
+// Write the header. The java layer will look for the marker.
+bool Metadata::appendHeader()
+{
+    bool ok = true;
+
+    // Placeholder for the length of the metadata
+    ok = ok && mData->writeInt32(-1) == OK; // patched later by updateLength()
+    ok = ok && mData->writeInt32(kMetaMarker) == OK; // 'META' marker, validated by Metadata.java
+    return ok;
+}
+
+bool Metadata::appendBool(int key, bool val) // append a (key, BOOLEAN_VAL, 0/1) record; false if key invalid/duplicate
+{
+    if (!checkKey(key)) {
+        return false;
+    }
+
+    const size_t begin = mData->dataPosition(); // saved so a partial write can be rolled back
+    bool ok = true;
+
+    // 4 int32s: size, key, type, value.
+    ok = ok && mData->writeInt32(4 * sizeof(int32_t)) == OK; // record size includes this size field itself
+    ok = ok && mData->writeInt32(key) == OK;
+    ok = ok && mData->writeInt32(BOOLEAN_VAL) == OK;
+    ok = ok && mData->writeInt32(val ? 1 : 0) == OK;
+    if (!ok) {
+        mData->setDataPosition(begin); // rewind: never leave a truncated record in the parcel
+    }
+    return ok;
+}
+
+bool Metadata::appendInt32(int key, int32_t val) // append a (key, INTEGER_VAL, val) record; false if key invalid/duplicate
+{
+    if (!checkKey(key)) {
+        return false;
+    }
+
+    const size_t begin = mData->dataPosition();
+    bool ok = true;
+
+    // 4 int32s: size, key, type, value.
+    ok = ok && mData->writeInt32(4 * sizeof(int32_t)) == OK;
+    ok = ok && mData->writeInt32(key) == OK;
+    ok = ok && mData->writeInt32(INTEGER_VAL) == OK;
+    ok = ok && mData->writeInt32(val) == OK;
+    if (!ok) {
+        mData->setDataPosition(begin); // rewind on partial failure, same as appendBool
+    }
+    return ok;
+}
+
+// Check the key (i.e metadata id) is valid if it is a system one.
+// Loop over all the exiting ones in the Parcel to check for duplicate
+// (not allowed).
+bool Metadata::checkKey(int key) // true iff key is in a valid id range and not already present in this record set
+{
+    if (key < FIRST_SYSTEM_ID ||
+        (LAST_SYSTEM_ID < key && key < FIRST_CUSTOM_ID)) { // reject ids in the reserved gap between system and custom ranges
+        LOGE("Bad key %d", key);
+        return false;
+    }
+    size_t curr = mData->dataPosition();
+    // Loop over the keys to check if it has been used already.
+    mData->setDataPosition(mBegin);
+
+    bool error = false;
+    size_t left = curr - mBegin; // bytes of already-written records to scan -- NOTE(review): includes the 8-byte header; relies on the size sanity check below to skip past it, confirm
+    while (left > 0) {
+        size_t pos = mData->dataPosition();
+        size_t size = mData->readInt32(); // first field of each record is its total size
+        if (size < kRecordHeaderSize || size > left) { // malformed record: bail rather than walk off the end
+            error = true;
+            break;
+        }
+        if (mData->readInt32() == key) { // second field is the key
+            LOGE("Key exists already %d", key);
+            error = true;
+            break;
+        }
+        mData->setDataPosition(pos + size); // jump to the next record regardless of its type/payload
+        left -= size;
+    }
+    mData->setDataPosition(curr); // always restore the write cursor
+    return !error;
+}
+
+} // namespace android::media
+} // namespace android
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index 5435da7..3ea64ae 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -1001,7 +1001,7 @@ bool ToneGenerator::initAudioTrack() {
// Open audio track in mono, PCM 16bit, default sampling rate, default buffer size
mpAudioTrack
- = new AudioTrack(mStreamType, 0, AudioSystem::PCM_16_BIT, 1, 0, 0, audioCallback, this, 0);
+ = new AudioTrack(mStreamType, 0, AudioSystem::PCM_16_BIT, AudioSystem::CHANNEL_OUT_MONO, 0, 0, audioCallback, this, 0);
if (mpAudioTrack == 0) {
LOGE("AudioTrack allocation failed");
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 09afc6c..d34a8ed 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -18,8 +18,8 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaMetadataRetriever"
-#include <utils/IServiceManager.h>
-#include <utils/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/IPCThreadState.h>
#include <media/mediametadataretriever.h>
#include <media/IMediaPlayerService.h>
#include <utils/Log.h>
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 24e3e6f..aeb43c5 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -24,13 +24,13 @@
#include <unistd.h>
#include <fcntl.h>
-#include <utils/IServiceManager.h>
-#include <utils/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/IPCThreadState.h>
#include <media/mediaplayer.h>
#include <media/AudioTrack.h>
-#include <utils/MemoryBase.h>
+#include <binder/MemoryBase.h>
namespace android {
@@ -196,12 +196,47 @@ status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length)
return err;
}
+status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply) // generic pass-through to the native player; requires a data source to be set
+{
+    Mutex::Autolock _l(mLock);
+    if ((mPlayer != NULL) && ( mCurrentState & MEDIA_PLAYER_INITIALIZED ))
+    {
+        LOGV("invoke %d", request.dataSize());
+        return mPlayer->invoke(request, reply);
+    }
+    LOGE("invoke failed: wrong state %X", mCurrentState);
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer::setMetadataFilter(const Parcel& filter) // forward the allow/drop metadata filter to the service-side Client
+{
+    LOGD("setMetadataFilter");
+    Mutex::Autolock lock(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+    return mPlayer->setMetadataFilter(filter);
+}
+
+status_t MediaPlayer::getMetadata(bool update_only, bool apply_filter, Parcel *metadata) // fetch (optionally only changed) metadata into 'metadata'
+{
+    LOGD("getMetadata");
+    Mutex::Autolock lock(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+    return mPlayer->getMetadata(update_only, apply_filter, metadata);
+}
+
status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
{
    LOGV("setVideoSurface");
    Mutex::Autolock _l(mLock);
    if (mPlayer == 0) return NO_INIT;
-    return mPlayer->setVideoSurface(surface->getISurface());
+    if (surface != NULL) // old code dereferenced surface unconditionally; NULL now means "detach the surface"
+        return mPlayer->setVideoSurface(surface->getISurface());
+    else
+        return mPlayer->setVideoSurface(NULL);
}
// must call with lock held
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 5093f0e..6b63931 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -20,7 +20,7 @@
#include <utils/Log.h>
#include <ui/Surface.h>
#include <media/mediarecorder.h>
-#include <utils/IServiceManager.h>
+#include <binder/IServiceManager.h>
#include <utils/String8.h>
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index f7f2490..f74ef3a 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -10,6 +10,7 @@ LOCAL_SRC_FILES:= \
MediaRecorderClient.cpp \
MediaPlayerService.cpp \
MetadataRetrieverClient.cpp \
+ TestPlayerStub.cpp \
VorbisPlayer.cpp \
MidiFile.cpp
@@ -20,6 +21,7 @@ endif
LOCAL_SHARED_LIBRARIES := \
libcutils \
libutils \
+ libbinder \
libvorbisidec \
libsonivox \
libopencore_player \
@@ -27,10 +29,27 @@ LOCAL_SHARED_LIBRARIES := \
libmedia \
libandroid_runtime
+ifneq ($(TARGET_SIMULATOR),true)
+LOCAL_SHARED_LIBRARIES += libdl
+endif
+
LOCAL_C_INCLUDES := external/tremor/Tremor \
- $(call include-path-for, graphics corecg)
+ $(call include-path-for, graphics corecg) \
+ $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include
LOCAL_MODULE:= libmediaplayerservice
+ifeq ($(BUILD_WITH_STAGEFRIGHT),true)
+ LOCAL_SRC_FILES += StagefrightPlayer.cpp
+
+ LOCAL_SHARED_LIBRARIES += \
+ libstagefright \
+ libstagefright_omx
+
+ LOCAL_C_INCLUDES += $(TOP)/frameworks/base/media/libstagefright/omx
+
+ LOCAL_CFLAGS += -DBUILD_WITH_STAGEFRIGHT -DUSE_STAGEFRIGHT
+endif
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 31eecac..1d960c5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -27,18 +27,27 @@
#include <unistd.h>
#include <string.h>
+
#include <cutils/atomic.h>
+#include <cutils/properties.h> // for property_get
+
+#include <utils/misc.h>
#include <android_runtime/ActivityManager.h>
-#include <utils/IPCThreadState.h>
-#include <utils/IServiceManager.h>
-#include <utils/MemoryHeapBase.h>
-#include <utils/MemoryBase.h>
+
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryBase.h>
+#include <utils/Errors.h> // for status_t
+#include <utils/String8.h>
+#include <utils/Vector.h>
#include <cutils/properties.h>
#include <media/MediaPlayerInterface.h>
#include <media/mediarecorder.h>
#include <media/MediaMetadataRetrieverInterface.h>
+#include <media/Metadata.h>
#include <media/AudioTrack.h>
#include "MediaRecorderClient.h"
@@ -48,6 +57,19 @@
#include "MidiFile.h"
#include "VorbisPlayer.h"
#include <media/PVPlayer.h>
+#include "TestPlayerStub.h"
+
+#if USE_STAGEFRIGHT
+#include "StagefrightPlayer.h"
+#endif
+
+#ifdef BUILD_WITH_STAGEFRIGHT
+#include <OMX.h>
+#else
+#include <media/IOMX.h>
+#endif
+
+
/* desktop Linux needs a little help with gettid() */
#if defined(HAVE_GETTID) && !defined(HAVE_ANDROID_OS)
@@ -61,6 +83,111 @@ pid_t gettid() { return syscall(__NR_gettid);}
#undef __KERNEL__
#endif
+namespace {
+using android::media::Metadata;
+using android::status_t;
+using android::OK;
+using android::BAD_VALUE;
+using android::NOT_ENOUGH_DATA;
+using android::Parcel;
+
+// Max number of entries in the filter.
+const int kMaxFilterSize = 64; // I pulled that out of thin air.
+
+// FIXME: Move all the metadata related function in the Metadata.cpp
+
+
+// Unmarshall a filter from a Parcel.
+// Filter format in a parcel:
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | number of entries (n) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | metadata type 1 |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | metadata type 2 |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// ....
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | metadata type n |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// @param p Parcel that should start with a filter.
+// @param[out] filter On exit contains the list of metadata type to be
+// filtered.
+// @param[out] status On exit contains the status code to be returned.
+// @return true if the parcel starts with a valid filter.
+bool unmarshallFilter(const Parcel& p,
+ Metadata::Filter *filter,
+ status_t *status)
+{
+ int32_t val;
+ if (p.readInt32(&val) != OK)
+ {
+ LOGE("Failed to read filter's length");
+ *status = NOT_ENOUGH_DATA;
+ return false;
+ }
+
+ if( val > kMaxFilterSize || val < 0)
+ {
+ LOGE("Invalid filter len %d", val);
+ *status = BAD_VALUE;
+ return false;
+ }
+
+ const size_t num = val;
+
+ filter->clear();
+ filter->setCapacity(num);
+
+ size_t size = num * sizeof(Metadata::Type);
+
+
+ if (p.dataAvail() < size)
+ {
+ LOGE("Filter too short expected %d but got %d", size, p.dataAvail());
+ *status = NOT_ENOUGH_DATA;
+ return false;
+ }
+
+ const Metadata::Type *data =
+ static_cast<const Metadata::Type*>(p.readInplace(size));
+
+ if (NULL == data)
+ {
+ LOGE("Filter had no data");
+ *status = BAD_VALUE;
+ return false;
+ }
+
+ // TODO: The stl impl of vector would be more efficient here
+ // because it degenerates into a memcpy on pod types. Try to
+ // replace later or use stl::set.
+ for (size_t i = 0; i < num; ++i)
+ {
+ filter->add(*data);
+ ++data;
+ }
+ *status = OK;
+ return true;
+}
+
+// @param filter Of metadata type.
+// @param val To be searched.
+// @return true if a match was found.
+bool findMetadata(const Metadata::Filter& filter, const int32_t val) // true iff val matches the filter (empty = match nothing, kAny first = match everything)
+{
+    // Deal with empty and ANY right away
+    if (filter.isEmpty()) return false;
+    if (filter[0] == Metadata::kAny) return true; // kAny only honored in the first slot
+
+    return filter.indexOf(val) >= 0; // linear scan; filter is capped at kMaxFilterSize entries
+}
+
+} // anonymous namespace
+
namespace android {
@@ -70,6 +197,10 @@ typedef struct {
const player_type playertype;
} extmap;
extmap FILE_EXTS [] = {
+#if USE_STAGEFRIGHT
+ {".mp4", STAGEFRIGHT_PLAYER},
+ {".3gp", STAGEFRIGHT_PLAYER},
+#endif
{".mid", SONIVOX_PLAYER},
{".midi", SONIVOX_PLAYER},
{".smf", SONIVOX_PLAYER},
@@ -105,7 +236,11 @@ MediaPlayerService::~MediaPlayerService()
sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(pid_t pid)
{
+#ifndef NO_OPENCORE
sp<MediaRecorderClient> recorder = new MediaRecorderClient(pid);
+#else
+ sp<MediaRecorderClient> recorder = NULL;
+#endif
LOGV("Create new media recorder client from pid %d", pid);
return recorder;
}
@@ -151,6 +286,14 @@ sp<IMediaPlayer> MediaPlayerService::create(pid_t pid, const sp<IMediaPlayerClie
return c;
}
+sp<IOMX> MediaPlayerService::createOMX() { // factory for the in-process OMX master; stagefright builds only
+#ifdef BUILD_WITH_STAGEFRIGHT
+    return new OMX;
+#else
+    return NULL; // NOTE(review): the CREATE_OMX binder handler calls omx->asBinder() unconditionally -- NULL here would crash it; confirm
+#endif
+}
+
status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& args) const
{
const size_t SIZE = 256;
@@ -457,6 +600,7 @@ void MediaPlayerService::Client::disconnect()
p = mPlayer;
}
mClient.clear();
+
mPlayer.clear();
// clear the notification to prevent callbacks to dead client
@@ -504,12 +648,19 @@ static player_type getPlayerType(int fd, int64_t offset, int64_t length)
EAS_Shutdown(easdata);
}
+#if USE_STAGEFRIGHT
+ return STAGEFRIGHT_PLAYER;
+#endif
+
// Fall through to PV
return PV_PLAYER;
}
static player_type getPlayerType(const char* url)
{
+ if (TestPlayerStub::canBeUsed(url)) {
+ return TEST_PLAYER;
+ }
// use MidiFile for MIDI extensions
int lenURL = strlen(url);
@@ -523,6 +674,10 @@ static player_type getPlayerType(const char* url)
}
}
+#if USE_STAGEFRIGHT
+ return STAGEFRIGHT_PLAYER;
+#endif
+
// Fall through to PV
return PV_PLAYER;
}
@@ -532,10 +687,12 @@ static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie,
{
sp<MediaPlayerBase> p;
switch (playerType) {
+#ifndef NO_OPENCORE
case PV_PLAYER:
LOGV(" create PVPlayer");
p = new PVPlayer();
break;
+#endif
case SONIVOX_PLAYER:
LOGV(" create MidiFile");
p = new MidiFile();
@@ -544,6 +701,21 @@ static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie,
LOGV(" create VorbisPlayer");
p = new VorbisPlayer();
break;
+#if USE_STAGEFRIGHT
+ case STAGEFRIGHT_PLAYER:
+ LOGV(" create StagefrightPlayer");
+ p = new StagefrightPlayer;
+ break;
+#else
+ case STAGEFRIGHT_PLAYER:
+ LOG_ALWAYS_FATAL(
+ "Should not be here, stagefright player not enabled.");
+ break;
+#endif
+ case TEST_PLAYER:
+ LOGV("Create Test Player stub");
+ p = new TestPlayerStub();
+ break;
}
if (p != NULL) {
if (p->initCheck() == NO_ERROR) {
@@ -608,7 +780,11 @@ status_t MediaPlayerService::Client::setDataSource(const char *url)
// now set data source
LOGV(" setDataSource");
mStatus = p->setDataSource(url);
- if (mStatus == NO_ERROR) mPlayer = p;
+ if (mStatus == NO_ERROR) {
+ mPlayer = p;
+ } else {
+ LOGE(" error: %d", mStatus);
+ }
return mStatus;
}
}
@@ -665,6 +841,73 @@ status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface
return p->setVideoSurface(surface);
}
+status_t MediaPlayerService::Client::invoke(const Parcel& request, // pass-through of opaque requests to the underlying player
+                                            Parcel *reply)
+{
+    sp<MediaPlayerBase> p = getPlayer();
+    if (p == NULL) return UNKNOWN_ERROR; // no player attached yet (setDataSource not done/failed)
+    return p->invoke(request, reply);
+}
+
+// This call doesn't need to access the native player.
+status_t MediaPlayerService::Client::setMetadataFilter(const Parcel& filter)
+{
+ status_t status;
+ media::Metadata::Filter allow, drop;
+
+ if (unmarshallFilter(filter, &allow, &status) &&
+ unmarshallFilter(filter, &drop, &status)) {
+ Mutex::Autolock lock(mLock);
+
+ mMetadataAllow = allow;
+ mMetadataDrop = drop;
+ }
+ return status;
+}
+
+status_t MediaPlayerService::Client::getMetadata( // serialize the player's metadata records into *reply (Metadata.java format)
+        bool update_only, bool apply_filter, Parcel *reply)
+{
+    sp<MediaPlayerBase> player = getPlayer();
+    if (player == 0) return UNKNOWN_ERROR;
+
+    status_t status;
+    // Placeholder for the return code, updated by the caller.
+    reply->writeInt32(-1);
+
+    media::Metadata::Filter ids; // empty unless update_only: then restricted to ids changed since last call
+
+    // We don't block notifications while we fetch the data. We clear
+    // mMetadataUpdated first so we don't lose notifications happening
+    // during the rest of this call.
+    {
+        Mutex::Autolock lock(mLock);
+        if (update_only) {
+            ids = mMetadataUpdated;
+        }
+        mMetadataUpdated.clear();
+    }
+
+    media::Metadata metadata(reply); // wraps reply; writes header/length around the player's records
+
+    metadata.appendHeader();
+    status = player->getMetadata(ids, reply);
+
+    if (status != OK) {
+        metadata.resetParcel(); // rewind: strip the header and any partial records on failure
+        LOGE("getMetadata failed %d", status);
+        return status;
+    }
+
+    // FIXME: Implement filtering on the result. Not critical since
+    // filtering takes place on the update notifications already. This
+    // would be when all the metadata are fetch and a filter is set.
+
+    // Everything is fine, update the metadata length.
+    metadata.updateLength();
+    return OK;
+}
+
status_t MediaPlayerService::Client::prepareAsync()
{
LOGV("[%d] prepareAsync", mConnId);
@@ -784,13 +1027,51 @@ status_t MediaPlayerService::Client::setVolume(float leftVolume, float rightVolu
return NO_ERROR;
}
+
void MediaPlayerService::Client::notify(void* cookie, int msg, int ext1, int ext2)
{
Client* client = static_cast<Client*>(cookie);
+
+ if (MEDIA_INFO == msg &&
+ MEDIA_INFO_METADATA_UPDATE == ext1) {
+ const media::Metadata::Type metadata_type = ext2;
+
+ if(client->shouldDropMetadata(metadata_type)) {
+ return;
+ }
+
+ // Update the list of metadata that have changed. getMetadata
+ // also access mMetadataUpdated and clears it.
+ client->addNewMetadataUpdate(metadata_type);
+ }
LOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2);
client->mClient->notify(msg, ext1, ext2);
}
+
+bool MediaPlayerService::Client::shouldDropMetadata(media::Metadata::Type code) const
+{
+ Mutex::Autolock lock(mLock);
+
+ if (findMetadata(mMetadataDrop, code)) {
+ return true;
+ }
+
+ if (mMetadataAllow.isEmpty() || findMetadata(mMetadataAllow, code)) {
+ return false;
+ } else {
+ return true;
+ }
+}
+
+
+void MediaPlayerService::Client::addNewMetadataUpdate(media::Metadata::Type metadata_type) {
+ Mutex::Autolock lock(mLock);
+ if (mMetadataUpdated.indexOf(metadata_type) < 0) {
+ mMetadataUpdated.add(metadata_type);
+ }
+}
+
#if CALLBACK_ANTAGONIZER
const int Antagonizer::interval = 10000; // 10 msecs
@@ -927,7 +1208,8 @@ Exit:
#undef LOG_TAG
#define LOG_TAG "AudioSink"
MediaPlayerService::AudioOutput::AudioOutput()
-{
+ : mCallback(NULL),
+ mCallbackCookie(NULL) {
mTrack = 0;
mStreamType = AudioSystem::MUSIC;
mLeftVolume = 1.0;
@@ -997,8 +1279,13 @@ float MediaPlayerService::AudioOutput::msecsPerFrame() const
return mMsecsPerFrame;
}
-status_t MediaPlayerService::AudioOutput::open(uint32_t sampleRate, int channelCount, int format, int bufferCount)
+status_t MediaPlayerService::AudioOutput::open(
+ uint32_t sampleRate, int channelCount, int format, int bufferCount,
+ AudioCallback cb, void *cookie)
{
+ mCallback = cb;
+ mCallbackCookie = cookie;
+
// Check argument "bufferCount" against the mininum buffer count
if (bufferCount < mMinBufferCount) {
LOGD("bufferCount (%d) is too small and increased to %d", bufferCount, mMinBufferCount);
@@ -1019,7 +1306,27 @@ status_t MediaPlayerService::AudioOutput::open(uint32_t sampleRate, int channelC
}
frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;
- AudioTrack *t = new AudioTrack(mStreamType, sampleRate, format, channelCount, frameCount);
+
+ AudioTrack *t;
+ if (mCallback != NULL) {
+ t = new AudioTrack(
+ mStreamType,
+ sampleRate,
+ format,
+ (channelCount == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO,
+ frameCount,
+ 0 /* flags */,
+ CallbackWrapper,
+ this);
+ } else {
+ t = new AudioTrack(
+ mStreamType,
+ sampleRate,
+ format,
+ (channelCount == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO,
+ frameCount);
+ }
+
if ((t == 0) || (t->initCheck() != NO_ERROR)) {
LOGE("Unable to create audio track");
delete t;
@@ -1045,6 +1352,8 @@ void MediaPlayerService::AudioOutput::start()
ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
{
+ LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
+
//LOGV("write(%p, %u)", buffer, size);
if (mTrack) return mTrack->write(buffer, size);
return NO_INIT;
@@ -1085,6 +1394,20 @@ void MediaPlayerService::AudioOutput::setVolume(float left, float right)
}
}
+// static
+void MediaPlayerService::AudioOutput::CallbackWrapper(
+ int event, void *cookie, void *info) {
+ if (event != AudioTrack::EVENT_MORE_DATA) {
+ return;
+ }
+
+ AudioOutput *me = (AudioOutput *)cookie;
+ AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+
+ (*me->mCallback)(
+ me, buffer->raw, buffer->size, me->mCallbackCookie);
+}
+
#undef LOG_TAG
#define LOG_TAG "AudioCache"
MediaPlayerService::AudioCache::AudioCache(const char* name) :
@@ -1105,8 +1428,14 @@ float MediaPlayerService::AudioCache::msecsPerFrame() const
return mMsecsPerFrame;
}
-status_t MediaPlayerService::AudioCache::open(uint32_t sampleRate, int channelCount, int format, int bufferCount)
+status_t MediaPlayerService::AudioCache::open(
+ uint32_t sampleRate, int channelCount, int format, int bufferCount,
+ AudioCallback cb, void *cookie)
{
+ if (cb != NULL) {
+ return UNKNOWN_ERROR; // TODO: implement this.
+ }
+
LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);
if (mHeap->getHeapID() < 0) return NO_INIT;
mSampleRate = sampleRate;
@@ -1171,4 +1500,4 @@ void MediaPlayerService::AudioCache::notify(void* cookie, int msg, int ext1, int
p->mSignal.signal();
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index f138886..a4be414 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -18,17 +18,23 @@
#ifndef ANDROID_MEDIAPLAYERSERVICE_H
#define ANDROID_MEDIAPLAYERSERVICE_H
-#include <utils.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <utils/List.h>
+#include <utils/Errors.h>
#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
#include <ui/SurfaceComposerClient.h>
#include <media/IMediaPlayerService.h>
#include <media/MediaPlayerInterface.h>
+#include <media/Metadata.h>
namespace android {
class IMediaRecorder;
class IMediaMetadataRetriever;
+class IOMX;
#define CALLBACK_ANTAGONIZER 0
#if CALLBACK_ANTAGONIZER
@@ -69,7 +75,12 @@ class MediaPlayerService : public BnMediaPlayerService
virtual ssize_t frameSize() const;
virtual uint32_t latency() const;
virtual float msecsPerFrame() const;
- virtual status_t open(uint32_t sampleRate, int channelCount, int format, int bufferCount=4);
+
+ virtual status_t open(
+ uint32_t sampleRate, int channelCount,
+ int format, int bufferCount,
+ AudioCallback cb, void *cookie);
+
virtual void start();
virtual ssize_t write(const void* buffer, size_t size);
virtual void stop();
@@ -84,8 +95,12 @@ class MediaPlayerService : public BnMediaPlayerService
static int getMinBufferCount();
private:
static void setMinBufferCount();
+ static void CallbackWrapper(
+ int event, void *me, void *info);
AudioTrack* mTrack;
+ AudioCallback mCallback;
+ void * mCallbackCookie;
int mStreamType;
float mLeftVolume;
float mRightVolume;
@@ -113,7 +128,12 @@ class MediaPlayerService : public BnMediaPlayerService
virtual ssize_t frameSize() const { return ssize_t(mChannelCount * ((mFormat == AudioSystem::PCM_16_BIT)?sizeof(int16_t):sizeof(u_int8_t))); }
virtual uint32_t latency() const;
virtual float msecsPerFrame() const;
- virtual status_t open(uint32_t sampleRate, int channelCount, int format, int bufferCount=1);
+
+ virtual status_t open(
+ uint32_t sampleRate, int channelCount, int format,
+ int bufferCount = 1,
+ AudioCallback cb = NULL, void *cookie = NULL);
+
virtual void start() {}
virtual ssize_t write(const void* buffer, size_t size);
virtual void stop() {}
@@ -140,7 +160,7 @@ class MediaPlayerService : public BnMediaPlayerService
sp<MemoryHeapBase> mHeap;
float mMsecsPerFrame;
uint16_t mChannelCount;
- uint16_t mFormat;
+ uint16_t mFormat;
ssize_t mFrameCount;
uint32_t mSampleRate;
uint32_t mSize;
@@ -160,11 +180,13 @@ public:
virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length);
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
+ virtual sp<IOMX> createOMX();
virtual status_t dump(int fd, const Vector<String16>& args);
void removeClient(wp<Client> client);
+
private:
class Client : public BnMediaPlayer {
@@ -184,6 +206,11 @@ private:
virtual status_t setAudioStreamType(int type);
virtual status_t setLooping(int loop);
virtual status_t setVolume(float leftVolume, float rightVolume);
+ virtual status_t invoke(const Parcel& request, Parcel *reply);
+ virtual status_t setMetadataFilter(const Parcel& filter);
+ virtual status_t getMetadata(bool update_only,
+ bool apply_filter,
+ Parcel *reply);
sp<MediaPlayerBase> createPlayer(player_type playerType);
status_t setDataSource(const char *url);
@@ -206,6 +233,18 @@ private:
sp<MediaPlayerBase> getPlayer() const { Mutex::Autolock lock(mLock); return mPlayer; }
+
+
+ // @param type Of the metadata to be tested.
+ // @return true if the metadata should be dropped according to
+ // the filters.
+ bool shouldDropMetadata(media::Metadata::Type type) const;
+
+ // Add a new element to the set of metadata updated. Noop if
+ // the element exists already.
+ // @param type Of the metadata to be recorded.
+ void addNewMetadataUpdate(media::Metadata::Type type);
+
mutable Mutex mLock;
sp<MediaPlayerBase> mPlayer;
sp<MediaPlayerService> mService;
@@ -215,6 +254,17 @@ private:
status_t mStatus;
bool mLoop;
int32_t mConnId;
+
+ // Metadata filters.
+ media::Metadata::Filter mMetadataAllow; // protected by mLock
+ media::Metadata::Filter mMetadataDrop; // protected by mLock
+
+ // Metadata updated. For each MEDIA_INFO_METADATA_UPDATE
+ // notification we try to update mMetadataUpdated which is a
+ // set: no duplicate.
+ // getMetadata clears this set.
+ media::Metadata::Filter mMetadataUpdated; // protected by mLock
+
#if CALLBACK_ANTAGONIZER
Antagonizer* mAntagonizer;
#endif
@@ -235,4 +285,3 @@ private:
}; // namespace android
#endif // ANDROID_MEDIAPLAYERSERVICE_H
-
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 8bc410c..e54f20d 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -25,10 +25,10 @@
#include <string.h>
#include <cutils/atomic.h>
#include <android_runtime/ActivityManager.h>
-#include <utils/IPCThreadState.h>
-#include <utils/IServiceManager.h>
-#include <utils/MemoryHeapBase.h>
-#include <utils/MemoryBase.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryBase.h>
#include <media/PVMediaRecorder.h>
#include <utils/String16.h>
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index a320bd5..ba8d9a8 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -26,10 +26,10 @@
#include <string.h>
#include <cutils/atomic.h>
-#include <utils/MemoryDealer.h>
+#include <binder/MemoryDealer.h>
#include <android_runtime/ActivityManager.h>
-#include <utils/IPCThreadState.h>
-#include <utils/IServiceManager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
#include <media/MediaMetadataRetrieverInterface.h>
#include <media/MediaPlayerInterface.h>
#include <media/PVMetadataRetriever.h>
@@ -49,7 +49,11 @@ MetadataRetrieverClient::MetadataRetrieverClient(pid_t pid)
mThumbnail = NULL;
mAlbumArt = NULL;
+#ifndef NO_OPENCORE
mRetriever = new PVMetadataRetriever();
+#else
+ mRetriever = NULL;
+#endif
if (mRetriever == NULL) {
LOGE("failed to initialize the retriever");
}
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index ce29c98..88d50bf 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -18,9 +18,12 @@
#ifndef ANDROID_MEDIAMETADATARETRIEVERSERVICE_H
#define ANDROID_MEDIAMETADATARETRIEVERSERVICE_H
-#include <utils.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <utils/List.h>
+#include <utils/Errors.h>
#include <utils/KeyedVector.h>
-#include <utils/IMemory.h>
+#include <binder/IMemory.h>
#include <media/MediaMetadataRetrieverInterface.h>
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 302f1cf..25d4a1b 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -46,6 +46,9 @@ public:
virtual status_t reset();
virtual status_t setLooping(int loop);
virtual player_type playerType() { return SONIVOX_PLAYER; }
+ virtual status_t invoke(const Parcel& request, Parcel *reply) {
+ return INVALID_OPERATION;
+ }
private:
status_t createOutputTrack();
@@ -74,4 +77,3 @@ private:
}; // namespace android
#endif // ANDROID_MIDIFILE_H
-
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
new file mode 100644
index 0000000..9a06d13
--- /dev/null
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -0,0 +1,208 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "StagefrightPlayer"
+#include <utils/Log.h>
+
+#include "StagefrightPlayer.h"
+#include <media/stagefright/MediaPlayerImpl.h>
+
+namespace android {
+
+StagefrightPlayer::StagefrightPlayer()
+ : mPlayer(NULL) {
+ LOGV("StagefrightPlayer");
+}
+
+StagefrightPlayer::~StagefrightPlayer() {
+ LOGV("~StagefrightPlayer");
+ reset();
+ LOGV("~StagefrightPlayer done.");
+}
+
+status_t StagefrightPlayer::initCheck() {
+ LOGV("initCheck");
+ return OK;
+}
+
+status_t StagefrightPlayer::setDataSource(const char *url) {
+ LOGV("setDataSource('%s')", url);
+
+ reset();
+ mPlayer = new MediaPlayerImpl(url);
+
+ status_t err = mPlayer->initCheck();
+ if (err != OK) {
+ delete mPlayer;
+ mPlayer = NULL;
+ } else {
+ mPlayer->setAudioSink(mAudioSink);
+ }
+
+ return err;
+}
+
+status_t StagefrightPlayer::setDataSource(int fd, int64_t offset, int64_t length) {
+ LOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+
+ reset();
+ mPlayer = new MediaPlayerImpl(fd, offset, length);
+
+ status_t err = mPlayer->initCheck();
+ if (err != OK) {
+ delete mPlayer;
+ mPlayer = NULL;
+ } else {
+ mPlayer->setAudioSink(mAudioSink);
+ }
+
+ return err;
+}
+
+status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) {
+ LOGV("setVideoSurface");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ mPlayer->setISurface(surface);
+
+ return OK;
+}
+
+status_t StagefrightPlayer::prepare() {
+ LOGV("prepare");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ sendEvent(
+ MEDIA_SET_VIDEO_SIZE,
+ mPlayer->getWidth(), mPlayer->getHeight());
+
+ return OK;
+}
+
+status_t StagefrightPlayer::prepareAsync() {
+ LOGV("prepareAsync");
+
+ status_t err = prepare();
+
+ if (err != OK) {
+ return err;
+ }
+
+ sendEvent(MEDIA_PREPARED);
+
+ return OK;
+}
+
+status_t StagefrightPlayer::start() {
+ LOGV("start");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ mPlayer->play();
+
+ return OK;
+}
+
+status_t StagefrightPlayer::stop() {
+ LOGV("stop");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ reset();
+
+ return OK;
+}
+
+status_t StagefrightPlayer::pause() {
+ LOGV("pause");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ mPlayer->pause();
+
+ return OK;
+}
+
+bool StagefrightPlayer::isPlaying() {
+ LOGV("isPlaying");
+ return mPlayer != NULL && mPlayer->isPlaying();
+}
+
+status_t StagefrightPlayer::seekTo(int msec) {
+ LOGV("seekTo");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ status_t err = mPlayer->seekTo((int64_t)msec * 1000);
+
+ sendEvent(MEDIA_SEEK_COMPLETE);
+
+ return err;
+}
+
+status_t StagefrightPlayer::getCurrentPosition(int *msec) {
+ LOGV("getCurrentPosition");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ *msec = mPlayer->getPosition() / 1000;
+ return OK;
+}
+
+status_t StagefrightPlayer::getDuration(int *msec) {
+ LOGV("getDuration");
+
+ if (mPlayer == NULL) {
+ return NO_INIT;
+ }
+
+ *msec = mPlayer->getDuration() / 1000;
+ return OK;
+}
+
+status_t StagefrightPlayer::reset() {
+ LOGV("reset");
+
+ delete mPlayer;
+ mPlayer = NULL;
+
+ return OK;
+}
+
+status_t StagefrightPlayer::setLooping(int loop) {
+ LOGV("setLooping");
+ return UNKNOWN_ERROR;
+}
+
+player_type StagefrightPlayer::playerType() {
+ LOGV("playerType");
+ return STAGEFRIGHT_PLAYER;
+}
+
+status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) {
+ return INVALID_OPERATION;
+}
+
+void StagefrightPlayer::setAudioSink(const sp<AudioSink> &audioSink) {
+ MediaPlayerInterface::setAudioSink(audioSink);
+
+ if (mPlayer != NULL) {
+ mPlayer->setAudioSink(audioSink);
+ }
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
new file mode 100644
index 0000000..f214872
--- /dev/null
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -0,0 +1,60 @@
+/*
+**
+** Copyright 2009, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_STAGEFRIGHTPLAYER_H
+#define ANDROID_STAGEFRIGHTPLAYER_H
+
+#include <media/MediaPlayerInterface.h>
+
+namespace android {
+
+class MediaPlayerImpl;
+
+class StagefrightPlayer : public MediaPlayerInterface {
+public:
+ StagefrightPlayer();
+ virtual ~StagefrightPlayer();
+
+ virtual status_t initCheck();
+ virtual status_t setDataSource(const char *url);
+ virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+ virtual status_t setVideoSurface(const sp<ISurface> &surface);
+ virtual status_t prepare();
+ virtual status_t prepareAsync();
+ virtual status_t start();
+ virtual status_t stop();
+ virtual status_t pause();
+ virtual bool isPlaying();
+ virtual status_t seekTo(int msec);
+ virtual status_t getCurrentPosition(int *msec);
+ virtual status_t getDuration(int *msec);
+ virtual status_t reset();
+ virtual status_t setLooping(int loop);
+ virtual player_type playerType();
+ virtual status_t invoke(const Parcel &request, Parcel *reply);
+ virtual void setAudioSink(const sp<AudioSink> &audioSink);
+
+private:
+ MediaPlayerImpl *mPlayer;
+
+ StagefrightPlayer(const StagefrightPlayer &);
+ StagefrightPlayer &operator=(const StagefrightPlayer &);
+};
+
+} // namespace android
+
+#endif // ANDROID_STAGEFRIGHTPLAYER_H
diff --git a/media/libmediaplayerservice/TestPlayerStub.cpp b/media/libmediaplayerservice/TestPlayerStub.cpp
new file mode 100644
index 0000000..8627708
--- /dev/null
+++ b/media/libmediaplayerservice/TestPlayerStub.cpp
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TestPlayerStub"
+#include "utils/Log.h"
+
+#include "TestPlayerStub.h"
+
+#include <dlfcn.h> // for dlopen/dlclose
+#include <stdlib.h>
+#include <string.h>
+#include <cutils/properties.h>
+#include <utils/Errors.h> // for status_t
+
+#include "media/MediaPlayerInterface.h"
+
+
+namespace {
+using android::status_t;
+using android::MediaPlayerBase;
+
+const char *kTestUrlScheme = "test:";
+const char *kUrlParam = "url=";
+
+const char *kBuildTypePropName = "ro.build.type";
+const char *kEngBuild = "eng";
+const char *kTestBuild = "test";
+
+// @return true if the current build is 'eng' or 'test'.
+bool isTestBuild()
+{
+ char prop[PROPERTY_VALUE_MAX] = { '\0', };
+
+ property_get(kBuildTypePropName, prop, '\0');
+ return strcmp(prop, kEngBuild) == 0 || strcmp(prop, kTestBuild) == 0;
+}
+
+// @return true if the url scheme is 'test:'
+bool isTestUrl(const char *url)
+{
+ return url && strncmp(url, kTestUrlScheme, strlen(kTestUrlScheme)) == 0;
+}
+
+} // anonymous namespace
+
+namespace android {
+
+TestPlayerStub::TestPlayerStub()
+ :mUrl(NULL), mFilename(NULL), mContentUrl(NULL),
+ mHandle(NULL), mNewPlayer(NULL), mDeletePlayer(NULL),
+ mPlayer(NULL) { }
+
+TestPlayerStub::~TestPlayerStub()
+{
+ resetInternal();
+}
+
+status_t TestPlayerStub::initCheck()
+{
+ return isTestBuild() ? OK : INVALID_OPERATION;
+}
+
+// Parse mUrl to get:
+// * The library to be dlopened.
+// * The url to be passed to the real setDataSource impl.
+//
+// mUrl is expected to be in following format:
+//
+// test:<name of the .so>?url=<url for setDataSource>
+//
+// The value of the url parameter is treated as a string (no
+// unescaping of illegal charaters).
+status_t TestPlayerStub::parseUrl()
+{
+ if (strlen(mUrl) < strlen(kTestUrlScheme)) {
+ resetInternal();
+ return BAD_VALUE;
+ }
+
+ char *i = mUrl + strlen(kTestUrlScheme);
+
+ mFilename = i;
+
+ while (*i != '\0' && *i != '?') {
+ ++i;
+ }
+
+ if (*i == '\0' || strncmp(i + 1, kUrlParam, strlen(kUrlParam)) != 0) {
+ resetInternal();
+ return BAD_VALUE;
+ }
+ *i = '\0'; // replace '?' to nul-terminate mFilename
+
+ mContentUrl = i + 1 + strlen(kUrlParam);
+ return OK;
+}
+
+// Load the dynamic library.
+// Create the test player.
+// Call setDataSource on the test player with the url in param.
+status_t TestPlayerStub::setDataSource(const char *url)
+{
+ if (!isTestUrl(url) || NULL != mHandle) {
+ return INVALID_OPERATION;
+ }
+
+ mUrl = strdup(url);
+
+ status_t status = parseUrl();
+
+ if (OK != status) {
+ resetInternal();
+ return status;
+ }
+
+ ::dlerror(); // Clears any pending error.
+
+ // Load the test player from the url. dlopen will fail if the lib
+ // is not there. dls are under /system/lib
+ // None of the entry points should be NULL.
+ mHandle = ::dlopen(mFilename, RTLD_NOW | RTLD_GLOBAL);
+ if (!mHandle) {
+ LOGE("dlopen failed: %s", ::dlerror());
+ resetInternal();
+ return UNKNOWN_ERROR;
+ }
+
+ // Load the 2 entry points to create and delete instances.
+ const char *err;
+ mNewPlayer = reinterpret_cast<NEW_PLAYER>(dlsym(mHandle,
+ "newPlayer"));
+ err = ::dlerror();
+ if (err || mNewPlayer == NULL) {
+ // if err is NULL the string <null> is inserted in the logs =>
+ // mNewPlayer was NULL.
+ LOGE("dlsym for newPlayer failed %s", err);
+ resetInternal();
+ return UNKNOWN_ERROR;
+ }
+
+ mDeletePlayer = reinterpret_cast<DELETE_PLAYER>(dlsym(mHandle,
+ "deletePlayer"));
+ err = ::dlerror();
+ if (err || mDeletePlayer == NULL) {
+ LOGE("dlsym for deletePlayer failed %s", err);
+ resetInternal();
+ return UNKNOWN_ERROR;
+ }
+
+ mPlayer = (*mNewPlayer)();
+ return mPlayer->setDataSource(mContentUrl);
+}
+
+// Internal cleanup.
+status_t TestPlayerStub::resetInternal()
+{
+ if(mUrl) {
+ free(mUrl);
+ mUrl = NULL;
+ }
+ mFilename = NULL;
+ mContentUrl = NULL;
+
+ if (mPlayer) {
+ LOG_ASSERT(mDeletePlayer != NULL);
+ (*mDeletePlayer)(mPlayer);
+ mPlayer = NULL;
+ }
+
+ if (mHandle) {
+ ::dlclose(mHandle);
+ mHandle = NULL;
+ }
+ return OK;
+}
+
+/* static */ bool TestPlayerStub::canBeUsed(const char *url)
+{
+ return isTestBuild() && isTestUrl(url);
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
new file mode 100644
index 0000000..80d53a8
--- /dev/null
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIAPLAYERSERVICE_TESTPLAYERSTUB_H__
+#define ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIAPLAYERSERVICE_TESTPLAYERSTUB_H__
+
+#include <media/MediaPlayerInterface.h>
+#include <utils/Errors.h>
+
+namespace android {
+class MediaPlayerBase; // in media/MediaPlayerInterface.h
+
+// Wrapper around a test media player that gets dynamically loaded.
+//
+// The URL passed to setDataSource has this format:
+//
+// test:<name of the .so>?url=<url for the real setDataSource impl.>
+//
+// e.g:
+// test:invoke_test_media_player.so?url=http://youtube.com/
+// test:invoke_test_media_player.so?url=speedtest
+//
+// TestPlayerStub::setDataSource loads the library in the test url. 2
+// entry points with C linkage are expected. One to create the test
+// player and one to destroy it.
+//
+// extern "C" android::MediaPlayerBase* newPlayer();
+// extern "C" android::status_t deletePlayer(android::MediaPlayerBase *p);
+//
+// Once the test player has been loaded, its setDataSource
+// implementation is called with the value of the 'url' parameter.
+//
+// typical usage in a java test:
+// ============================
+//
+// MediaPlayer p = new MediaPlayer();
+// p.setDataSource("test:invoke_mock_media_player.so?url=http://youtube.com");
+// p.prepare();
+// ...
+// p.release();
+
+class TestPlayerStub : public MediaPlayerInterface {
+ public:
+ typedef MediaPlayerBase* (*NEW_PLAYER)();
+ typedef status_t (*DELETE_PLAYER)(MediaPlayerBase *);
+
+ TestPlayerStub();
+ virtual ~TestPlayerStub();
+
+ // Called right after the constructor. Check if the current build
+ // allows test players.
+ virtual status_t initCheck();
+
+ // @param url Should be a test url. See class comment.
+ virtual status_t setDataSource(const char* url);
+
+ // Test player for a file descriptor source is not supported.
+ virtual status_t setDataSource(int, int64_t, int64_t) {
+ return INVALID_OPERATION;
+ }
+
+
+ // All the methods below wrap the mPlayer instance.
+ virtual status_t setVideoSurface(const android::sp<android::ISurface>& s) {
+ return mPlayer->setVideoSurface(s);
+ }
+ virtual status_t prepare() {return mPlayer->prepare();}
+ virtual status_t prepareAsync() {return mPlayer->prepareAsync();}
+ virtual status_t start() {return mPlayer->start();}
+ virtual status_t stop() {return mPlayer->stop();}
+ virtual status_t pause() {return mPlayer->pause();}
+ virtual bool isPlaying() {return mPlayer->isPlaying();}
+ virtual status_t seekTo(int msec) {return mPlayer->seekTo(msec);}
+ virtual status_t getCurrentPosition(int *p) {
+ return mPlayer->getCurrentPosition(p);
+ }
+ virtual status_t getDuration(int *d) {return mPlayer->getDuration(d);}
+ virtual status_t reset() {return mPlayer->reset();}
+ virtual status_t setLooping(int b) {return mPlayer->setLooping(b);}
+ virtual player_type playerType() {return mPlayer->playerType();}
+ virtual status_t invoke(const android::Parcel& in, android::Parcel *out) {
+ return mPlayer->invoke(in, out);
+ }
+
+
+ // @return true if the current build is 'eng' or 'test' and the
+ // url's scheme is 'test:'
+ static bool canBeUsed(const char *url);
+
+ private:
+ // Release the player, dlclose the library.
+ status_t resetInternal();
+ status_t parseUrl();
+
+ char *mUrl; // test:foo.so?url=http://bar
+ char *mFilename; // foo.so
+ char *mContentUrl; // http://bar
+ void *mHandle; // returned by dlopen
+ NEW_PLAYER mNewPlayer;
+ DELETE_PLAYER mDeletePlayer;
+ MediaPlayerBase *mPlayer; // wrapped player
+};
+
+} // namespace android
+
+#endif
diff --git a/media/libmediaplayerservice/VorbisPlayer.h b/media/libmediaplayerservice/VorbisPlayer.h
index c30dc1b..4024654 100644
--- a/media/libmediaplayerservice/VorbisPlayer.h
+++ b/media/libmediaplayerservice/VorbisPlayer.h
@@ -53,6 +53,7 @@ public:
virtual status_t reset();
virtual status_t setLooping(int loop);
virtual player_type playerType() { return VORBIS_PLAYER; }
+ virtual status_t invoke(const Parcel& request, Parcel *reply) {return INVALID_OPERATION;}
private:
status_t setdatasource(const char *path, int fd, int64_t offset, int64_t length);
@@ -88,4 +89,3 @@ private:
}; // namespace android
#endif // ANDROID_VORBISPLAYER_H
-
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
new file mode 100644
index 0000000..5944d9c
--- /dev/null
+++ b/media/libstagefright/Android.mk
@@ -0,0 +1,60 @@
+ifeq ($(BUILD_WITH_STAGEFRIGHT),true)
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ CachingDataSource.cpp \
+ DataSource.cpp \
+ FileSource.cpp \
+ HTTPDataSource.cpp \
+ HTTPStream.cpp \
+ MP3Extractor.cpp \
+ MPEG4Extractor.cpp \
+ MPEG4Writer.cpp \
+ MediaBuffer.cpp \
+ MediaBufferGroup.cpp \
+ MediaExtractor.cpp \
+ MediaPlayerImpl.cpp \
+ MediaSource.cpp \
+ MetaData.cpp \
+ MmapSource.cpp \
+ QComHardwareRenderer.cpp \
+ SampleTable.cpp \
+ ShoutcastSource.cpp \
+ SoftwareRenderer.cpp \
+ SurfaceRenderer.cpp \
+ TimeSource.cpp \
+ TimedEventQueue.cpp \
+ Utils.cpp \
+ AudioPlayer.cpp \
+ ESDS.cpp \
+ OMXClient.cpp \
+ OMXDecoder.cpp \
+ string.cpp
+
+LOCAL_C_INCLUDES:= \
+ $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \
+ $(TOP)/external/opencore/android
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libmedia \
+ libutils \
+ libcutils \
+ libui
+
+ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
+ LOCAL_LDLIBS += -lpthread
+endif
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_PRELINK_MODULE:= false
+
+LOCAL_MODULE:= libstagefright
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
+endif
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
new file mode 100644
index 0000000..17c72b9
--- /dev/null
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef NDEBUG
+#include <assert.h>
+
+#define LOG_TAG "AudioPlayer"
+#include <utils/Log.h>
+
+#include <media/AudioTrack.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+AudioPlayer::AudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink)
+ : mSource(NULL),
+ mAudioTrack(NULL),
+ mInputBuffer(NULL),
+ mSampleRate(0),
+ mLatencyUs(0),
+ mFrameSize(0),
+ mNumFramesPlayed(0),
+ mPositionTimeMediaUs(-1),
+ mPositionTimeRealUs(-1),
+ mSeeking(false),
+ mStarted(false),
+ mAudioSink(audioSink) {
+}
+
+AudioPlayer::~AudioPlayer() {
+ if (mStarted) {
+ stop();
+ }
+}
+
+void AudioPlayer::setSource(MediaSource *source) {
+ assert(mSource == NULL);
+ mSource = source;
+}
+
+void AudioPlayer::start() {
+ assert(!mStarted);
+ assert(mSource != NULL);
+
+ status_t err = mSource->start();
+ assert(err == OK);
+
+ sp<MetaData> format = mSource->getFormat();
+ const char *mime;
+ bool success = format->findCString(kKeyMIMEType, &mime);
+ assert(success);
+ assert(!strcasecmp(mime, "audio/raw"));
+
+ success = format->findInt32(kKeySampleRate, &mSampleRate);
+ assert(success);
+
+ int32_t numChannels;
+ success = format->findInt32(kKeyChannelCount, &numChannels);
+ assert(success);
+
+ if (mAudioSink.get() != NULL) {
+ status_t err = mAudioSink->open(
+ mSampleRate, numChannels, AudioSystem::PCM_16_BIT,
+ DEFAULT_AUDIOSINK_BUFFERCOUNT,
+ &AudioPlayer::AudioSinkCallback, this);
+ assert(err == OK);
+
+ mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
+ mFrameSize = mAudioSink->frameSize();
+
+ mAudioSink->start();
+ } else {
+ mAudioTrack = new AudioTrack(
+ AudioSystem::MUSIC, mSampleRate, AudioSystem::PCM_16_BIT,
+ numChannels, 8192, 0, &AudioCallback, this, 0);
+
+ assert(mAudioTrack->initCheck() == OK);
+
+ mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
+ mFrameSize = mAudioTrack->frameSize();
+
+ mAudioTrack->start();
+ }
+
+ mStarted = true;
+}
+
+void AudioPlayer::pause() {
+ assert(mStarted);
+
+ if (mAudioSink.get() != NULL) {
+ mAudioSink->pause();
+ } else {
+ mAudioTrack->stop();
+ }
+}
+
+void AudioPlayer::resume() {
+ assert(mStarted);
+
+ if (mAudioSink.get() != NULL) {
+ mAudioSink->start();
+ } else {
+ mAudioTrack->start();
+ }
+}
+
+void AudioPlayer::stop() {
+ assert(mStarted);
+
+ if (mAudioSink.get() != NULL) {
+ mAudioSink->stop();
+ } else {
+ mAudioTrack->stop();
+
+ delete mAudioTrack;
+ mAudioTrack = NULL;
+ }
+
+ // Make sure to release any buffer we hold onto so that the
+ // source is able to stop().
+ if (mInputBuffer != NULL) {
+ LOGI("AudioPlayer releasing input buffer.");
+
+ mInputBuffer->release();
+ mInputBuffer = NULL;
+ }
+
+ mSource->stop();
+
+ mNumFramesPlayed = 0;
+ mPositionTimeMediaUs = -1;
+ mPositionTimeRealUs = -1;
+ mSeeking = false;
+ mStarted = false;
+}
+
+// static
+void AudioPlayer::AudioCallback(int event, void *user, void *info) {
+ static_cast<AudioPlayer *>(user)->AudioCallback(event, info);
+}
+
+// static
+void AudioPlayer::AudioSinkCallback(
+ MediaPlayerBase::AudioSink *audioSink,
+ void *buffer, size_t size, void *cookie) {
+ AudioPlayer *me = (AudioPlayer *)cookie;
+
+ me->fillBuffer(buffer, size);
+}
+
+void AudioPlayer::AudioCallback(int event, void *info) {
+ if (event != AudioTrack::EVENT_MORE_DATA) {
+ return;
+ }
+
+ AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+ fillBuffer(buffer->raw, buffer->size);
+}
+
+void AudioPlayer::fillBuffer(void *data, size_t size) {
+ if (mNumFramesPlayed == 0) {
+ LOGI("AudioCallback");
+ }
+
+ size_t size_done = 0;
+ size_t size_remaining = size;
+ while (size_remaining > 0) {
+ MediaSource::ReadOptions options;
+
+ {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mSeeking) {
+ options.setSeekTo(mSeekTimeUs);
+
+ if (mInputBuffer != NULL) {
+ mInputBuffer->release();
+ mInputBuffer = NULL;
+ }
+ mSeeking = false;
+ }
+ }
+
+ if (mInputBuffer == NULL) {
+ status_t err = mSource->read(&mInputBuffer, &options);
+
+ assert((err == OK && mInputBuffer != NULL)
+ || (err != OK && mInputBuffer == NULL));
+
+ if (err != OK) {
+ memset((char *)data + size_done, 0, size_remaining);
+ break;
+ }
+
+ int32_t units, scale;
+ bool success =
+ mInputBuffer->meta_data()->findInt32(kKeyTimeUnits, &units);
+ success = success &&
+ mInputBuffer->meta_data()->findInt32(kKeyTimeScale, &scale);
+ assert(success);
+
+ Mutex::Autolock autoLock(mLock);
+ mPositionTimeMediaUs = (int64_t)units * 1000000 / scale;
+ mPositionTimeRealUs =
+ ((mNumFramesPlayed + size_done / 4) * 1000000) / mSampleRate; // XXX
+ }
+
+ if (mInputBuffer->range_length() == 0) {
+ mInputBuffer->release();
+ mInputBuffer = NULL;
+
+ continue;
+ }
+
+ size_t copy = size_remaining;
+ if (copy > mInputBuffer->range_length()) {
+ copy = mInputBuffer->range_length();
+ }
+
+ memcpy((char *)data + size_done,
+ (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
+ copy);
+
+ mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
+ mInputBuffer->range_length() - copy);
+
+ size_done += copy;
+ size_remaining -= copy;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ mNumFramesPlayed += size / mFrameSize;
+}
+
+int64_t AudioPlayer::getRealTimeUs() {
+ Mutex::Autolock autoLock(mLock);
+ return getRealTimeUsLocked();
+}
+
+int64_t AudioPlayer::getRealTimeUsLocked() const {
+ return -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate;
+}
+
+int64_t AudioPlayer::getMediaTimeUs() {
+ Mutex::Autolock autoLock(mLock);
+
+ return mPositionTimeMediaUs + (getRealTimeUsLocked() - mPositionTimeRealUs);
+}
+
+bool AudioPlayer::getMediaTimeMapping(
+ int64_t *realtime_us, int64_t *mediatime_us) {
+ Mutex::Autolock autoLock(mLock);
+
+ *realtime_us = mPositionTimeRealUs;
+ *mediatime_us = mPositionTimeMediaUs;
+
+ return mPositionTimeRealUs != -1 || mPositionTimeMediaUs != -1;
+}
+
+status_t AudioPlayer::seekTo(int64_t time_us) {
+ Mutex::Autolock autoLock(mLock);
+
+ mSeeking = true;
+ mSeekTimeUs = time_us;
+
+ return OK;
+}
+
+}
diff --git a/media/libstagefright/CachingDataSource.cpp b/media/libstagefright/CachingDataSource.cpp
new file mode 100644
index 0000000..0fd71d5
--- /dev/null
+++ b/media/libstagefright/CachingDataSource.cpp
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/CachingDataSource.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <stdlib.h>
+#include <string.h>
+
+namespace android {
+
+CachingDataSource::CachingDataSource(
+ DataSource *source, size_t pageSize, int numPages)
+ : mSource(source),
+ mData(malloc(pageSize * numPages)),
+ mPageSize(pageSize),
+ mFirst(NULL),
+ mLast(NULL) {
+ for (int i = 0; i < numPages; ++i) {
+ Page *page = new Page;
+ page->mPrev = mLast;
+ page->mNext = NULL;
+
+ if (mLast == NULL) {
+ mFirst = page;
+ } else {
+ mLast->mNext = page;
+ }
+
+ mLast = page;
+
+ page->mOffset = -1;
+ page->mLength = 0;
+ page->mData = (char *)mData + mPageSize * i;
+ }
+}
+
+CachingDataSource::~CachingDataSource() {
+ Page *page = mFirst;
+ while (page != NULL) {
+ Page *next = page->mNext;
+ delete page;
+ page = next;
+ }
+ mFirst = mLast = NULL;
+
+ free(mData);
+ mData = NULL;
+
+ delete mSource;
+ mSource = NULL;
+}
+
+status_t CachingDataSource::InitCheck() const {
+ return OK;
+}
+
+ssize_t CachingDataSource::read_at(off_t offset, void *data, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ size_t total = 0;
+ while (size > 0) {
+ Page *page = mFirst;
+ while (page != NULL) {
+ if (page->mOffset >= 0 && offset >= page->mOffset
+ && offset < page->mOffset + page->mLength) {
+ break;
+ }
+ page = page->mNext;
+ }
+
+ if (page == NULL) {
+ page = allocate_page();
+ page->mOffset = offset - offset % mPageSize;
+ ssize_t n = mSource->read_at(page->mOffset, page->mData, mPageSize);
+ if (n < 0) {
+ page->mLength = 0;
+ } else {
+ page->mLength = (size_t)n;
+ }
+ mFirst->mPrev = page;
+ page->mNext = mFirst;
+ page->mPrev = NULL;
+ mFirst = page;
+
+ if (n < 0) {
+ return n;
+ }
+
+ if (offset >= page->mOffset + page->mLength) {
+ break;
+ }
+ } else {
+ // Move "page" to the front in LRU order.
+ if (page->mNext != NULL) {
+ page->mNext->mPrev = page->mPrev;
+ } else {
+ mLast = page->mPrev;
+ }
+
+ if (page->mPrev != NULL) {
+ page->mPrev->mNext = page->mNext;
+ } else {
+ mFirst = page->mNext;
+ }
+
+ mFirst->mPrev = page;
+ page->mNext = mFirst;
+ page->mPrev = NULL;
+ mFirst = page;
+ }
+
+ size_t copy = page->mLength - (offset - page->mOffset);
+ if (copy > size) {
+ copy = size;
+ }
+ memcpy(data,(const char *)page->mData + (offset - page->mOffset),
+ copy);
+
+ total += copy;
+
+ if (page->mLength < mPageSize) {
+ // This was the final page. There is no more data beyond it.
+ break;
+ }
+
+ offset += copy;
+ size -= copy;
+ data = (char *)data + copy;
+ }
+
+ return total;
+}
+
+CachingDataSource::Page *CachingDataSource::allocate_page() {
+ // The last page is the least recently used, i.e. oldest.
+
+ Page *page = mLast;
+
+ page->mPrev->mNext = NULL;
+ mLast = page->mPrev;
+ page->mPrev = NULL;
+
+ return page;
+}
+
+} // namespace android
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
new file mode 100644
index 0000000..ee12873
--- /dev/null
+++ b/media/libstagefright/CameraSource.cpp
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/time.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <OMX_Component.h>
+
+#include <binder/IServiceManager.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <ui/ICameraClient.h>
+#include <ui/ICameraService.h>
+#include <ui/Overlay.h>
+#include <utils/String16.h>
+
+namespace android {
+
+class CameraBuffer : public MediaBuffer {
+public:
+ CameraBuffer(const sp<IMemory> &frame)
+ : MediaBuffer(frame->pointer(), frame->size()),
+ mFrame(frame) {
+ }
+
+ sp<IMemory> releaseFrame() {
+ sp<IMemory> frame = mFrame;
+ mFrame.clear();
+ return frame;
+ }
+
+private:
+ sp<IMemory> mFrame;
+};
+
+class CameraSourceClient : public BnCameraClient {
+public:
+ CameraSourceClient()
+ : mSource(NULL) {
+ }
+
+ virtual void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) {
+ assert(mSource != NULL);
+ mSource->notifyCallback(msgType, ext1, ext2);
+ }
+
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ assert(mSource != NULL);
+ mSource->dataCallback(msgType, data);
+ }
+
+ void setCameraSource(CameraSource *source) {
+ mSource = source;
+ }
+
+private:
+ CameraSource *mSource;
+};
+
+class DummySurface : public BnSurface {
+public:
+ DummySurface() {}
+
+ virtual status_t registerBuffers(const BufferHeap &buffers) {
+ return OK;
+ }
+
+ virtual void postBuffer(ssize_t offset) {
+ }
+
+ virtual void unregisterBuffers() {
+ }
+
+ virtual sp<OverlayRef> createOverlay(
+ uint32_t w, uint32_t h, int32_t format) {
+ return NULL;
+ }
+};
+
+// static
+CameraSource *CameraSource::Create() {
+ sp<IServiceManager> sm = defaultServiceManager();
+
+ sp<ICameraService> service =
+ interface_cast<ICameraService>(
+ sm->getService(String16("media.camera")));
+
+ sp<CameraSourceClient> client = new CameraSourceClient;
+ sp<ICamera> camera = service->connect(client);
+
+ CameraSource *source = new CameraSource(camera, client);
+ client->setCameraSource(source);
+
+ return source;
+}
+
+CameraSource::CameraSource(
+ const sp<ICamera> &camera, const sp<ICameraClient> &client)
+ : mCamera(camera),
+ mCameraClient(client),
+ mNumFrames(0),
+ mStarted(false) {
+ printf("params: \"%s\"\n", mCamera->getParameters().string());
+}
+
+CameraSource::~CameraSource() {
+ if (mStarted) {
+ stop();
+ }
+
+ mCamera->disconnect();
+}
+
+status_t CameraSource::start(MetaData *) {
+ assert(!mStarted);
+
+ status_t err = mCamera->lock();
+ assert(err == OK);
+
+ err = mCamera->setPreviewDisplay(new DummySurface);
+ assert(err == OK);
+ mCamera->setPreviewCallbackFlag(1);
+ mCamera->startPreview();
+ assert(err == OK);
+
+ mStarted = true;
+
+ return OK;
+}
+
+status_t CameraSource::stop() {
+ assert(mStarted);
+
+ mCamera->stopPreview();
+ mCamera->unlock();
+
+ mStarted = false;
+
+ return OK;
+}
+
+sp<MetaData> CameraSource::getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setCString(kKeyMIMEType, "video/raw");
+ meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar);
+ meta->setInt32(kKeyWidth, 480);
+ meta->setInt32(kKeyHeight, 320);
+
+ return meta;
+}
+
+status_t CameraSource::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ assert(mStarted);
+
+ *buffer = NULL;
+
+ int64_t seekTimeUs;
+ if (options && options->getSeekTo(&seekTimeUs)) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ sp<IMemory> frame;
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ while (mFrames.empty()) {
+ mFrameAvailableCondition.wait(mLock);
+ }
+
+ frame = *mFrames.begin();
+ mFrames.erase(mFrames.begin());
+ }
+
+ int count = mNumFrames++;
+
+ *buffer = new CameraBuffer(frame);
+
+ (*buffer)->meta_data()->clear();
+ (*buffer)->meta_data()->setInt32(kKeyTimeScale, 15);
+ (*buffer)->meta_data()->setInt32(kKeyTimeUnits, count);
+
+ (*buffer)->add_ref();
+ (*buffer)->setObserver(this);
+
+ return OK;
+}
+
+void CameraSource::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) {
+ printf("notifyCallback %d, %d, %d\n", msgType, ext1, ext2);
+}
+
+void CameraSource::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ Mutex::Autolock autoLock(mLock);
+
+ mFrames.push_back(data);
+ mFrameAvailableCondition.signal();
+}
+
+void CameraSource::signalBufferReturned(MediaBuffer *_buffer) {
+ CameraBuffer *buffer = static_cast<CameraBuffer *>(_buffer);
+
+ mCamera->releaseRecordingFrame(buffer->releaseFrame());
+
+ buffer->setObserver(NULL);
+ buffer->release();
+ buffer = NULL;
+}
+
+} // namespace android
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
new file mode 100644
index 0000000..6e6b43d
--- /dev/null
+++ b/media/libstagefright/DataSource.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MP3Extractor.h>
+#include <media/stagefright/MPEG4Extractor.h>
+#include <utils/String8.h>
+
+namespace android {
+
+status_t DataSource::getSize(off_t *size) {
+ *size = 0;
+
+ return ERROR_UNSUPPORTED;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+Mutex DataSource::gSnifferMutex;
+List<DataSource::SnifferFunc> DataSource::gSniffers;
+
+bool DataSource::sniff(String8 *mimeType, float *confidence) {
+ *mimeType = "";
+ *confidence = 0.0f;
+
+ Mutex::Autolock autoLock(gSnifferMutex);
+ for (List<SnifferFunc>::iterator it = gSniffers.begin();
+ it != gSniffers.end(); ++it) {
+ String8 newMimeType;
+ float newConfidence;
+ if ((*it)(this, &newMimeType, &newConfidence)) {
+ if (newConfidence > *confidence) {
+ *mimeType = newMimeType;
+ *confidence = newConfidence;
+ }
+ }
+ }
+
+ return *confidence > 0.0;
+}
+
+// static
+void DataSource::RegisterSniffer(SnifferFunc func) {
+ Mutex::Autolock autoLock(gSnifferMutex);
+
+ for (List<SnifferFunc>::iterator it = gSniffers.begin();
+ it != gSniffers.end(); ++it) {
+ if (*it == func) {
+ return;
+ }
+ }
+
+ gSniffers.push_back(func);
+}
+
+// static
+void DataSource::RegisterDefaultSniffers() {
+ RegisterSniffer(SniffMP3);
+ RegisterSniffer(SniffMPEG4);
+}
+
+} // namespace android
diff --git a/media/libstagefright/ESDS.cpp b/media/libstagefright/ESDS.cpp
new file mode 100644
index 0000000..53b92a0
--- /dev/null
+++ b/media/libstagefright/ESDS.cpp
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/ESDS.h>
+
+#include <string.h>
+
+namespace android {
+
+ESDS::ESDS(const void *data, size_t size)
+ : mData(new uint8_t[size]),
+ mSize(size),
+ mInitCheck(NO_INIT),
+ mDecoderSpecificOffset(0),
+ mDecoderSpecificLength(0) {
+ memcpy(mData, data, size);
+
+ mInitCheck = parse();
+}
+
+ESDS::~ESDS() {
+ delete[] mData;
+ mData = NULL;
+}
+
+status_t ESDS::InitCheck() const {
+ return mInitCheck;
+}
+
+status_t ESDS::getCodecSpecificInfo(const void **data, size_t *size) const {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ *data = &mData[mDecoderSpecificOffset];
+ *size = mDecoderSpecificLength;
+
+ return OK;
+}
+
+status_t ESDS::skipDescriptorHeader(
+ size_t offset, size_t size,
+ uint8_t *tag, size_t *data_offset, size_t *data_size) const {
+ if (size == 0) {
+ return ERROR_MALFORMED;
+ }
+
+ *tag = mData[offset++];
+ --size;
+
+ *data_size = 0;
+ bool more;
+ do {
+ if (size == 0) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t x = mData[offset++];
+ --size;
+
+ *data_size = (*data_size << 7) | (x & 0x7f);
+ more = (x & 0x80) != 0;
+ }
+ while (more);
+
+ if (*data_size > size) {
+ return ERROR_MALFORMED;
+ }
+
+ *data_offset = offset;
+
+ return OK;
+}
+
+status_t ESDS::parse() {
+ uint8_t tag;
+ size_t data_offset;
+ size_t data_size;
+ status_t err =
+ skipDescriptorHeader(0, mSize, &tag, &data_offset, &data_size);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (tag != kTag_ESDescriptor) {
+ return ERROR_MALFORMED;
+ }
+
+ return parseESDescriptor(data_offset, data_size);
+}
+
+status_t ESDS::parseESDescriptor(size_t offset, size_t size) {
+ if (size < 3) {
+ return ERROR_MALFORMED;
+ }
+
+ offset += 2; // skip ES_ID
+ size -= 2;
+
+ unsigned streamDependenceFlag = mData[offset] & 0x80;
+ unsigned URL_Flag = mData[offset] & 0x40;
+ unsigned OCRstreamFlag = mData[offset] & 0x20;
+
+ ++offset;
+ --size;
+
+ if (streamDependenceFlag) {
+ offset += 2;
+ size -= 2;
+ }
+
+ if (URL_Flag) {
+ if (offset >= size) {
+ return ERROR_MALFORMED;
+ }
+ unsigned URLlength = mData[offset];
+ offset += URLlength + 1;
+ size -= URLlength + 1;
+ }
+
+ if (OCRstreamFlag) {
+ offset += 2;
+ size -= 2;
+ }
+
+ if (offset >= size) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t tag;
+ size_t sub_offset, sub_size;
+ status_t err = skipDescriptorHeader(
+ offset, size, &tag, &sub_offset, &sub_size);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (tag != kTag_DecoderConfigDescriptor) {
+ return ERROR_MALFORMED;
+ }
+
+ err = parseDecoderConfigDescriptor(sub_offset, sub_size);
+
+ return err;
+}
+
+status_t ESDS::parseDecoderConfigDescriptor(size_t offset, size_t size) {
+ if (size < 13) {
+ return ERROR_MALFORMED;
+ }
+
+ offset += 13;
+ size -= 13;
+
+ if (size == 0) {
+ mDecoderSpecificOffset = 0;
+ mDecoderSpecificLength = 0;
+ return OK;
+ }
+
+ uint8_t tag;
+ size_t sub_offset, sub_size;
+ status_t err = skipDescriptorHeader(
+ offset, size, &tag, &sub_offset, &sub_size);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (tag != kTag_DecoderSpecificInfo) {
+ return ERROR_MALFORMED;
+ }
+
+ mDecoderSpecificOffset = sub_offset;
+ mDecoderSpecificLength = sub_size;
+
+ return OK;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
new file mode 100644
index 0000000..c26d0a0
--- /dev/null
+++ b/media/libstagefright/FileSource.cpp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/FileSource.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+namespace android {
+
+FileSource::FileSource(const char *filename)
+ : mFile(fopen(filename, "rb")) {
+}
+
+FileSource::~FileSource() {
+ if (mFile != NULL) {
+ fclose(mFile);
+ mFile = NULL;
+ }
+}
+
+status_t FileSource::InitCheck() const {
+ return mFile != NULL ? OK : NO_INIT;
+}
+
+ssize_t FileSource::read_at(off_t offset, void *data, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ int err = fseeko(mFile, offset, SEEK_SET);
+ assert(err != -1);
+
+ ssize_t result = fread(data, 1, size, mFile);
+
+ return result;
+}
+
+} // namespace android
diff --git a/media/libstagefright/HTTPDataSource.cpp b/media/libstagefright/HTTPDataSource.cpp
new file mode 100644
index 0000000..d1f8cd4
--- /dev/null
+++ b/media/libstagefright/HTTPDataSource.cpp
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <stdlib.h>
+
+#include <media/stagefright/HTTPDataSource.h>
+#include <media/stagefright/HTTPStream.h>
+#include <media/stagefright/string.h>
+
+namespace android {
+
+HTTPDataSource::HTTPDataSource(const char *uri)
+ : mHost(NULL),
+ mPort(0),
+ mPath(NULL),
+ mBuffer(malloc(kBufferSize)),
+ mBufferLength(0),
+ mBufferOffset(0) {
+ assert(!strncasecmp("http://", uri, 7));
+
+ string host;
+ string path;
+ int port;
+
+ char *slash = strchr(uri + 7, '/');
+ if (slash == NULL) {
+ host = uri + 7;
+ path = "/";
+ } else {
+ host = string(uri + 7, slash - (uri + 7));
+ path = slash;
+ }
+
+ char *colon = strchr(host.c_str(), ':');
+ if (colon == NULL) {
+ port = 80;
+ } else {
+ char *end;
+ long tmp = strtol(colon + 1, &end, 10);
+ assert(end > colon + 1);
+ assert(tmp > 0 && tmp < 65536);
+ port = tmp;
+
+ host = string(host, 0, colon - host.c_str());
+ }
+
+ LOGI("Connecting to host '%s', port %d, path '%s'",
+ host.c_str(), port, path.c_str());
+
+ mHost = strdup(host.c_str());
+ mPort = port;
+ mPath = strdup(path.c_str());
+
+ status_t err = mHttp.connect(mHost, mPort);
+ assert(err == OK);
+}
+
+HTTPDataSource::HTTPDataSource(const char *host, int port, const char *path)
+ : mHost(strdup(host)),
+ mPort(port),
+ mPath(strdup(path)),
+ mBuffer(malloc(kBufferSize)),
+ mBufferLength(0),
+ mBufferOffset(0) {
+ status_t err = mHttp.connect(mHost, mPort);
+ assert(err == OK);
+}
+
+HTTPDataSource::~HTTPDataSource() {
+ mHttp.disconnect();
+
+ free(mBuffer);
+ mBuffer = NULL;
+
+ free(mPath);
+ mPath = NULL;
+}
+
+ssize_t HTTPDataSource::read_at(off_t offset, void *data, size_t size) {
+ if (offset >= mBufferOffset && offset < mBufferOffset + mBufferLength) {
+ size_t num_bytes_available = mBufferLength - (offset - mBufferOffset);
+
+ size_t copy = num_bytes_available;
+ if (copy > size) {
+ copy = size;
+ }
+
+ memcpy(data, (const char *)mBuffer + (offset - mBufferOffset), copy);
+
+ return copy;
+ }
+
+ mBufferOffset = offset;
+ mBufferLength = 0;
+
+ char host[128];
+ sprintf(host, "Host: %s\r\n", mHost);
+
+ char range[128];
+ sprintf(range, "Range: bytes=%ld-%ld\r\n\r\n",
+ mBufferOffset, mBufferOffset + kBufferSize - 1);
+
+ int http_status;
+
+ status_t err;
+ int attempt = 1;
+ for (;;) {
+ if ((err = mHttp.send("GET ")) != OK
+ || (err = mHttp.send(mPath)) != OK
+ || (err = mHttp.send(" HTTP/1.1\r\n")) != OK
+ || (err = mHttp.send(host)) != OK
+ || (err = mHttp.send(range)) != OK
+ || (err = mHttp.send("\r\n")) != OK
+ || (err = mHttp.receive_header(&http_status)) != OK) {
+
+ if (attempt == 3) {
+ return err;
+ }
+
+ mHttp.connect(mHost, mPort);
+ ++attempt;
+ } else {
+ break;
+ }
+ }
+
+ if ((http_status / 100) != 2) {
+ return UNKNOWN_ERROR;
+ }
+
+ string value;
+ if (!mHttp.find_header_value("Content-Length", &value)) {
+ return UNKNOWN_ERROR;
+ }
+
+ char *end;
+ unsigned long contentLength = strtoul(value.c_str(), &end, 10);
+
+ ssize_t num_bytes_received = mHttp.receive(mBuffer, contentLength);
+
+ if (num_bytes_received <= 0) {
+ return num_bytes_received;
+ }
+
+ mBufferLength = (size_t)num_bytes_received;
+
+ size_t copy = mBufferLength;
+ if (copy > size) {
+ copy = size;
+ }
+
+ memcpy(data, mBuffer, copy);
+
+ return copy;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp
new file mode 100644
index 0000000..29e6f72
--- /dev/null
+++ b/media/libstagefright/HTTPStream.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/socket.h>
+
+#include <arpa/inet.h>
+#include <assert.h>
+#include <ctype.h>
+#include <errno.h>
+#include <netdb.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <media/stagefright/HTTPStream.h>
+
+namespace android {
+
+// static
+const char *HTTPStream::kStatusKey = ":status:";
+
+HTTPStream::HTTPStream()
+ : mState(READY),
+ mSocket(-1) {
+}
+
+HTTPStream::~HTTPStream() {
+ disconnect();
+}
+
+status_t HTTPStream::connect(const char *server, int port) {
+ status_t err = OK;
+
+ if (mState == CONNECTED) {
+ return ERROR_ALREADY_CONNECTED;
+ }
+
+ assert(mSocket == -1);
+ mSocket = socket(AF_INET, SOCK_STREAM, 0);
+
+ if (mSocket < 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ struct hostent *ent = gethostbyname(server);
+ if (ent == NULL) {
+ err = ERROR_UNKNOWN_HOST;
+ goto exit1;
+ }
+
+ struct sockaddr_in addr;
+ addr.sin_family = AF_INET;
+ addr.sin_port = htons(port);
+ addr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+ memset(addr.sin_zero, 0, sizeof(addr.sin_zero));
+
+ if (::connect(mSocket, (const struct sockaddr *)&addr, sizeof(addr)) < 0) {
+ err = ERROR_CANNOT_CONNECT;
+ goto exit1;
+ }
+
+ mState = CONNECTED;
+
+ return OK;
+
+exit1:
+ close(mSocket);
+ mSocket = -1;
+
+ return err;
+}
+
+status_t HTTPStream::disconnect() {
+ if (mState != CONNECTED) {
+ return ERROR_NOT_CONNECTED;
+ }
+
+ assert(mSocket >= 0);
+ close(mSocket);
+ mSocket = -1;
+
+ mState = READY;
+
+ return OK;
+}
+
+status_t HTTPStream::send(const char *data, size_t size) {
+ if (mState != CONNECTED) {
+ return ERROR_NOT_CONNECTED;
+ }
+
+ while (size > 0) {
+ ssize_t n = ::send(mSocket, data, size, 0);
+
+ if (n < 0) {
+ if (errno == EINTR) {
+ continue;
+ }
+
+ disconnect();
+
+ return ERROR_IO;
+ } else if (n == 0) {
+ disconnect();
+
+ return ERROR_CONNECTION_LOST;
+ }
+
+ size -= (size_t)n;
+ data += (size_t)n;
+ }
+
+ return OK;
+}
+
+status_t HTTPStream::send(const char *data) {
+ return send(data, strlen(data));
+}
+
+status_t HTTPStream::receive_line(char *line, size_t size) {
+ if (mState != CONNECTED) {
+ return ERROR_NOT_CONNECTED;
+ }
+
+ bool saw_CR = false;
+ size_t length = 0;
+
+ for (;;) {
+ char c;
+ ssize_t n = recv(mSocket, &c, 1, 0);
+ if (n < 0) {
+ if (errno == EINTR) {
+ continue;
+ }
+
+ disconnect();
+
+ return ERROR_IO;
+ } else if (n == 0) {
+ disconnect();
+
+ return ERROR_CONNECTION_LOST;
+ }
+
+ if (saw_CR && c == '\n') {
+ // We have a complete line.
+
+ line[length - 1] = '\0';
+ return OK;
+ }
+
+ saw_CR = (c == '\r');
+
+ assert(length + 1 < size);
+ line[length++] = c;
+ }
+}
+
+status_t HTTPStream::receive_header(int *http_status) {
+ *http_status = -1;
+ mHeaders.clear();
+
+ char line[256];
+ status_t err = receive_line(line, sizeof(line));
+ if (err != OK) {
+ return err;
+ }
+
+ mHeaders.add(string(kStatusKey), string(line));
+
+ char *spacePos = strchr(line, ' ');
+ if (spacePos == NULL) {
+ // Malformed response?
+ return UNKNOWN_ERROR;
+ }
+
+ char *status_start = spacePos + 1;
+ char *status_end = status_start;
+ while (isdigit(*status_end)) {
+ ++status_end;
+ }
+
+ if (status_end == status_start) {
+ // Malformed response, status missing?
+ return UNKNOWN_ERROR;
+ }
+
+ memmove(line, status_start, status_end - status_start);
+ line[status_end - status_start] = '\0';
+
+ long tmp = strtol(line, NULL, 10);
+ if (tmp < 0 || tmp > 999) {
+ return UNKNOWN_ERROR;
+ }
+
+ *http_status = (int)tmp;
+
+ for (;;) {
+ err = receive_line(line, sizeof(line));
+ if (err != OK) {
+ return err;
+ }
+
+ if (*line == '\0') {
+ // Empty line signals the end of the header.
+ break;
+ }
+
+ // puts(line);
+
+ char *colonPos = strchr(line, ':');
+ if (colonPos == NULL) {
+ mHeaders.add(string(line), string());
+ } else {
+ char *end_of_key = colonPos;
+ while (end_of_key > line && isspace(end_of_key[-1])) {
+ --end_of_key;
+ }
+
+ char *start_of_value = colonPos + 1;
+ while (isspace(*start_of_value)) {
+ ++start_of_value;
+ }
+
+ *end_of_key = '\0';
+
+ mHeaders.add(string(line), string(start_of_value));
+ }
+ }
+
+ return OK;
+}
+
+ssize_t HTTPStream::receive(void *data, size_t size) {
+ size_t total = 0;
+ while (total < size) {
+ ssize_t n = recv(mSocket, (char *)data + total, size - total, 0);
+
+ if (n < 0) {
+ if (errno == EINTR) {
+ continue;
+ }
+
+ disconnect();
+ return ERROR_IO;
+ } else if (n == 0) {
+ disconnect();
+
+ return ERROR_CONNECTION_LOST;
+ }
+
+ total += (size_t)n;
+ }
+
+ return (ssize_t)total;
+}
+
+bool HTTPStream::find_header_value(const string &key, string *value) const {
+ ssize_t index = mHeaders.indexOfKey(key);
+ if (index < 0) {
+ value->clear();
+ return false;
+ }
+
+ *value = mHeaders.valueAt(index);
+
+ return true;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
new file mode 100644
index 0000000..6b47a38
--- /dev/null
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -0,0 +1,527 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MP3Extractor"
+#include <utils/Log.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MP3Extractor.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <utils/String8.h>
+
+namespace android {
+
// Decodes a 32-bit MPEG audio frame header and computes the frame size in
// bytes. Optionally reports sampling rate (Hz), channel count and bitrate
// (kbps). Returns false if the header is not a valid MPEG audio header.
//
// Header bit layout used below (from the 32 bits starting at the sync word):
//   bits 31-21 sync (all ones), 20-19 version, 18-17 layer, 16 protection,
//   15-12 bitrate index, 11-10 sampling rate index, 9 padding,
//   7-6 channel mode.
static bool get_mp3_frame_size(
        uint32_t header, size_t *frame_size,
        int *out_sampling_rate = NULL, int *out_channels = NULL,
        int *out_bitrate = NULL) {
    *frame_size = 0;

    if (out_sampling_rate) {
        *out_sampling_rate = 0;
    }

    if (out_channels) {
        *out_channels = 0;
    }

    if (out_bitrate) {
        *out_bitrate = 0;
    }

    // The 11-bit sync word must be all ones.
    if ((header & 0xffe00000) != 0xffe00000) {
        return false;
    }

    // version: 3 = MPEG-1 (V1), 2 = MPEG-2 (V2), 0 = MPEG-2.5; 1 is reserved.
    unsigned version = (header >> 19) & 3;

    if (version == 0x01) {
        return false;
    }

    // layer: 3 = Layer I, 2 = Layer II, 1 = Layer III; 0 is reserved.
    unsigned layer = (header >> 17) & 3;

    if (layer == 0x00) {
        return false;
    }

    unsigned protection = (header >> 16) & 1;

    unsigned bitrate_index = (header >> 12) & 0x0f;

    if (bitrate_index == 0 || bitrate_index == 0x0f) {
        // Disallow "free" bitrate.

        LOGE("We disallow 'free' bitrate for now.");
        return false;
    }

    unsigned sampling_rate_index = (header >> 10) & 3;

    if (sampling_rate_index == 3) {
        return false;
    }

    // Base rates are for MPEG-1; MPEG-2 halves them, MPEG-2.5 quarters them.
    static const int kSamplingRateV1[] = { 44100, 48000, 32000 };
    int sampling_rate = kSamplingRateV1[sampling_rate_index];
    if (version == 2 /* V2 */) {
        sampling_rate /= 2;
    } else if (version == 0 /* V2.5 */) {
        sampling_rate /= 4;
    }

    unsigned padding = (header >> 9) & 1;

    if (layer == 3) {
        // layer I

        static const int kBitrateV1[] = {
            32, 64, 96, 128, 160, 192, 224, 256,
            288, 320, 352, 384, 416, 448
        };

        static const int kBitrateV2[] = {
            32, 48, 56, 64, 80, 96, 112, 128,
            144, 160, 176, 192, 224, 256
        };

        // bitrate_index 0 was rejected above, so index - 1 is in range.
        int bitrate =
            (version == 3 /* V1 */)
                ? kBitrateV1[bitrate_index - 1]
                : kBitrateV2[bitrate_index - 1];

        if (out_bitrate) {
            *out_bitrate = bitrate;
        }

        // Layer I frames hold 384 samples; size in 4-byte slots.
        *frame_size = (12000 * bitrate / sampling_rate + padding) * 4;
    } else {
        // layer II or III

        static const int kBitrateV1L2[] = {
            32, 48, 56, 64, 80, 96, 112, 128,
            160, 192, 224, 256, 320, 384
        };

        static const int kBitrateV1L3[] = {
            32, 40, 48, 56, 64, 80, 96, 112,
            128, 160, 192, 224, 256, 320
        };

        static const int kBitrateV2[] = {
            8, 16, 24, 32, 40, 48, 56, 64,
            80, 96, 112, 128, 144, 160
        };

        int bitrate;
        if (version == 3 /* V1 */) {
            bitrate = (layer == 2 /* L2 */)
                ? kBitrateV1L2[bitrate_index - 1]
                : kBitrateV1L3[bitrate_index - 1];
        } else {
            // V2 (or 2.5)

            bitrate = kBitrateV2[bitrate_index - 1];
        }

        if (out_bitrate) {
            *out_bitrate = bitrate;
        }

        // NOTE(review): this uses the 1152-samples-per-frame formula for
        // all of Layer II/III; MPEG-2/2.5 Layer III frames carry 576
        // samples, so sizes there may be overestimated -- confirm.
        *frame_size = 144000 * bitrate / sampling_rate + padding;
    }

    if (out_sampling_rate) {
        *out_sampling_rate = sampling_rate;
    }

    if (out_channels) {
        // channel_mode 3 == single channel (mono); everything else is
        // reported as stereo.
        int channel_mode = (header >> 6) & 3;

        *out_channels = (channel_mode == 3) ? 1 : 2;
    }

    return true;
}
+
// Scans forward from *inout_pos for a valid MP3 frame followed by three
// consistent successor frames. If match_header is non-zero, candidate
// headers must also agree with it in all fields covered by kMask.
// On success, *inout_pos is updated to the frame's file offset and
// *out_header (if non-NULL) receives its header. Gives up after scanning
// 128 KiB past the starting position.
static bool Resync(
        DataSource *source, uint32_t match_header,
        off_t *inout_pos, uint32_t *out_header) {
    // Everything must match except for
    // protection, bitrate, padding, private bits and mode extension.
    const uint32_t kMask = 0xfffe0ccf;

    const size_t kMaxFrameSize = 4096;
    uint8_t *buffer = new uint8_t[kMaxFrameSize];

    // Start with an "empty" window positioned so that the first refill
    // below reads from exactly *inout_pos.
    off_t pos = *inout_pos - kMaxFrameSize;
    size_t buffer_offset = kMaxFrameSize;
    size_t buffer_length = kMaxFrameSize;
    bool valid = false;
    do {
        // Refill the sliding window when fewer than 4 bytes (one header)
        // remain to inspect.
        if (buffer_offset + 3 >= buffer_length) {
            if (buffer_length < kMaxFrameSize) {
                // The previous refill came up short: end of data.
                break;
            }

            pos += buffer_offset;

            if (pos >= *inout_pos + 128 * 1024) {
                // Don't scan forever.
                LOGV("giving up at offset %ld", pos);
                break;
            }

            // Keep the unconsumed tail and append fresh data after it.
            memmove(buffer, &buffer[buffer_offset], buffer_length - buffer_offset);
            buffer_length = buffer_length - buffer_offset;
            buffer_offset = 0;

            ssize_t n = source->read_at(
                    pos, &buffer[buffer_length], kMaxFrameSize - buffer_length);

            if (n <= 0) {
                break;
            }

            buffer_length += (size_t)n;

            continue;
        }

        uint32_t header = U32_AT(&buffer[buffer_offset]);

        if (match_header != 0 && (header & kMask) != (match_header & kMask)) {
            ++buffer_offset;
            continue;
        }

        size_t frame_size;
        int sample_rate, num_channels, bitrate;
        if (!get_mp3_frame_size(header, &frame_size,
                               &sample_rate, &num_channels, &bitrate)) {
            ++buffer_offset;
            continue;
        }

        LOGV("found possible 1st frame at %ld", pos + buffer_offset);

        // We found what looks like a valid frame,
        // now find its successors.

        off_t test_pos = pos + buffer_offset + frame_size;

        // Require three consecutive follow-up frames that agree with the
        // candidate header (under kMask) to reject false sync words.
        valid = true;
        for (int j = 0; j < 3; ++j) {
            uint8_t tmp[4];
            if (source->read_at(test_pos, tmp, 4) < 4) {
                valid = false;
                break;
            }

            uint32_t test_header = U32_AT(tmp);

            LOGV("subsequent header is %08x", test_header);

            if ((test_header & kMask) != (header & kMask)) {
                valid = false;
                break;
            }

            size_t test_frame_size;
            if (!get_mp3_frame_size(test_header, &test_frame_size)) {
                valid = false;
                break;
            }

            LOGV("found subsequent frame #%d at %ld", j + 2, test_pos);

            test_pos += test_frame_size;
        }

        if (valid) {
            *inout_pos = pos + buffer_offset;

            if (out_header != NULL) {
                *out_header = header;
            }
        } else {
            LOGV("no dice, no valid sequence of frames found.");
        }

        // Advance past the candidate; harmless when valid since the loop
        // exits immediately.
        ++buffer_offset;

    } while (!valid);

    delete[] buffer;
    buffer = NULL;

    return valid;
}
+
// MediaSource that emits one MP3 frame per read() call. The extractor
// hands it the offset of the first valid frame and the "fixed" header
// bits shared by all frames, which are used to re-sync after corruption.
class MP3Source : public MediaSource {
public:
    MP3Source(
            const sp<MetaData> &meta, DataSource *source,
            off_t first_frame_pos, uint32_t fixed_header);

    virtual ~MP3Source();

    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();

    virtual sp<MetaData> getFormat();

    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL);

private:
    sp<MetaData> mMeta;          // stream format (mime, rate, bitrate, ...)
    DataSource *mDataSource;     // not owned
    off_t mFirstFramePos;        // file offset of the first valid frame
    uint32_t mFixedHeader;       // reference header bits for Resync()
    off_t mCurrentPos;           // file offset of the next frame to read
    int64_t mCurrentTimeUs;      // timestamp of the next frame
    bool mStarted;

    MediaBufferGroup *mGroup;    // owns the single output buffer

    // Disallow copying: this object owns start/stop state.
    MP3Source(const MP3Source &);
    MP3Source &operator=(const MP3Source &);
};
+
+MP3Extractor::MP3Extractor(DataSource *source)
+ : mDataSource(source),
+ mFirstFramePos(-1),
+ mFixedHeader(0) {
+ off_t pos = 0;
+ uint32_t header;
+ bool success = Resync(mDataSource, 0, &pos, &header);
+ assert(success);
+
+ if (success) {
+ mFirstFramePos = pos;
+ mFixedHeader = header;
+
+ size_t frame_size;
+ int sample_rate;
+ int num_channels;
+ int bitrate;
+ get_mp3_frame_size(
+ header, &frame_size, &sample_rate, &num_channels, &bitrate);
+
+ mMeta = new MetaData;
+
+ mMeta->setCString(kKeyMIMEType, "audio/mpeg");
+ mMeta->setInt32(kKeySampleRate, sample_rate);
+ mMeta->setInt32(kKeyBitRate, bitrate);
+ mMeta->setInt32(kKeyChannelCount, num_channels);
+
+ off_t fileSize;
+ if (mDataSource->getSize(&fileSize) == OK) {
+ mMeta->setInt32(
+ kKeyDuration,
+ 8 * (fileSize - mFirstFramePos) / bitrate);
+ mMeta->setInt32(kKeyTimeScale, 1000);
+ }
+ }
+}
+
// The extractor owns the DataSource passed at construction time.
MP3Extractor::~MP3Extractor() {
    delete mDataSource;
    mDataSource = NULL;
}
+
+status_t MP3Extractor::countTracks(int *num_tracks) {
+ *num_tracks = mFirstFramePos < 0 ? 0 : 1;
+
+ return OK;
+}
+
+status_t MP3Extractor::getTrack(int index, MediaSource **source) {
+ if (mFirstFramePos < 0 || index != 0) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ *source = new MP3Source(
+ mMeta, mDataSource, mFirstFramePos, mFixedHeader);
+
+ return OK;
+}
+
+sp<MetaData> MP3Extractor::getTrackMetaData(int index) {
+ if (mFirstFramePos < 0 || index != 0) {
+ return NULL;
+ }
+
+ return mMeta;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
// Stores the stream parameters; no I/O happens until start()/read().
// "source" is not owned by this object.
MP3Source::MP3Source(
        const sp<MetaData> &meta, DataSource *source,
        off_t first_frame_pos, uint32_t fixed_header)
    : mMeta(meta),
      mDataSource(source),
      mFirstFramePos(first_frame_pos),
      mFixedHeader(fixed_header),
      mCurrentPos(0),
      mCurrentTimeUs(0),
      mStarted(false),
      mGroup(NULL) {
}
+
// Ensures buffers are released even if the client forgot to stop().
MP3Source::~MP3Source() {
    if (mStarted) {
        stop();
    }
}
+
+status_t MP3Source::start(MetaData *) {
+ assert(!mStarted);
+
+ mGroup = new MediaBufferGroup;
+
+ const size_t kMaxFrameSize = 32768;
+ mGroup->add_buffer(new MediaBuffer(kMaxFrameSize));
+
+ mCurrentPos = mFirstFramePos;
+ mCurrentTimeUs = 0;
+
+ mStarted = true;
+
+ return OK;
+}
+
+status_t MP3Source::stop() {
+ assert(mStarted);
+
+ delete mGroup;
+ mGroup = NULL;
+
+ mStarted = false;
+
+ return OK;
+}
+
// Returns the format metadata supplied by the extractor.
sp<MetaData> MP3Source::getFormat() {
    return mMeta;
}
+
+status_t MP3Source::read(
+ MediaBuffer **out, const ReadOptions *options) {
+ *out = NULL;
+
+ int64_t seekTimeUs;
+ if (options != NULL && options->getSeekTo(&seekTimeUs)) {
+ int32_t bitrate;
+ if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
+ // bitrate is in kbits/sec.
+ LOGI("no bitrate");
+
+ return ERROR_UNSUPPORTED;
+ }
+
+ mCurrentTimeUs = seekTimeUs;
+ mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 1000000 * 125;
+ }
+
+ MediaBuffer *buffer;
+ status_t err = mGroup->acquire_buffer(&buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ size_t frame_size;
+ for (;;) {
+ ssize_t n = mDataSource->read_at(mCurrentPos, buffer->data(), 4);
+ if (n < 4) {
+ buffer->release();
+ buffer = NULL;
+
+ return ERROR_END_OF_STREAM;
+ }
+
+ uint32_t header = U32_AT((const uint8_t *)buffer->data());
+
+ if (get_mp3_frame_size(header, &frame_size)) {
+ break;
+ }
+
+ // Lost sync.
+ LOGW("lost sync!\n");
+
+ off_t pos = mCurrentPos;
+ if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) {
+ LOGE("Unable to resync. Signalling end of stream.");
+
+ buffer->release();
+ buffer = NULL;
+
+ return ERROR_END_OF_STREAM;
+ }
+
+ mCurrentPos = pos;
+
+ // Try again with the new position.
+ }
+
+ assert(frame_size <= buffer->size());
+
+ ssize_t n = mDataSource->read_at(mCurrentPos, buffer->data(), frame_size);
+ if (n < (ssize_t)frame_size) {
+ buffer->release();
+ buffer = NULL;
+
+ return ERROR_END_OF_STREAM;
+ }
+
+ buffer->set_range(0, frame_size);
+
+ buffer->meta_data()->setInt32(kKeyTimeUnits, mCurrentTimeUs / 1000);
+ buffer->meta_data()->setInt32(kKeyTimeScale, 1000);
+
+ mCurrentPos += frame_size;
+ mCurrentTimeUs += 1152 * 1000000 / 44100;
+
+ *out = buffer;
+
+ return OK;
+}
+
+bool SniffMP3(DataSource *source, String8 *mimeType, float *confidence) {
+ off_t pos = 0;
+ uint32_t header;
+ if (!Resync(source, 0, &pos, &header)) {
+ return false;
+ }
+
+ *mimeType = "audio/mpeg";
+ *confidence = 0.3f;
+
+ return true;
+}
+
+} // namespace android
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
new file mode 100644
index 0000000..caaec06
--- /dev/null
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -0,0 +1,937 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MPEG4Extractor"
+#include <utils/Log.h>
+
+#include <arpa/inet.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <ctype.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MPEG4Extractor.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/SampleTable.h>
+#include <media/stagefright/Utils.h>
+#include <utils/String8.h>
+
+namespace android {
+
// MediaSource that delivers one sample (or one NAL unit for AVC) per
// read() call, using the track's SampleTable to locate samples.
class MPEG4Source : public MediaSource {
public:
    // Caller retains ownership of both "dataSource" and "sampleTable".
    MPEG4Source(const sp<MetaData> &format, DataSource *dataSource,
                SampleTable *sampleTable);

    virtual ~MPEG4Source();

    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();

    virtual sp<MetaData> getFormat();

    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL);

private:
    sp<MetaData> mFormat;
    DataSource *mDataSource;       // not owned
    int32_t mTimescale;            // ticks per second, from kKeyTimeScale
    SampleTable *mSampleTable;     // not owned
    uint32_t mCurrentSampleIndex;  // next sample to deliver

    bool mIsAVC;                   // AVC samples are length-prefixed NALs
    bool mStarted;

    MediaBufferGroup *mGroup;

    // AVC samples may contain several NAL units; these track how much of
    // the current sample buffer has been handed out so far.
    MediaBuffer *mBuffer;
    size_t mBufferOffset;
    size_t mBufferSizeRemaining;

    // When set, emit Annex-B style start codes instead of length prefixes.
    bool mNeedsNALFraming;

    MPEG4Source(const MPEG4Source &);
    MPEG4Source &operator=(const MPEG4Source &);
};
+
// Debug helper: prints "size" bytes at "_data" to stdout as a classic
// offset / hex / ASCII dump, 16 bytes per line.
//
// Fix: "offset" is a size_t but was passed to printf for a plain "%x"
// conversion, which is undefined behavior on LP64 targets; it is now
// explicitly narrowed to unsigned.
static void hexdump(const void *_data, size_t size) {
    const uint8_t *data = (const uint8_t *)_data;
    size_t offset = 0;
    while (offset < size) {
        printf("0x%04x ", (unsigned)offset);

        size_t n = size - offset;
        if (n > 16) {
            n = 16;
        }

        // Hex column: always 16 slots wide so the ASCII column lines up.
        for (size_t i = 0; i < 16; ++i) {
            if (i == 8) {
                printf(" ");
            }

            if (offset + i < size) {
                printf("%02x ", data[offset + i]);
            } else {
                printf(" ");
            }
        }

        printf(" ");

        // ASCII column: printable bytes as-is, everything else as '.'.
        for (size_t i = 0; i < n; ++i) {
            if (isprint(data[offset + i])) {
                printf("%c", data[offset + i]);
            } else {
                printf(".");
            }
        }

        printf("\n");

        offset += 16;
    }
}
+
+static const char *const FourCC2MIME(uint32_t fourcc) {
+ switch (fourcc) {
+ case FOURCC('m', 'p', '4', 'a'):
+ return "audio/mp4a-latm";
+
+ case FOURCC('s', 'a', 'm', 'r'):
+ return "audio/3gpp";
+
+ case FOURCC('m', 'p', '4', 'v'):
+ return "video/mp4v-es";
+
+ case FOURCC('s', '2', '6', '3'):
+ return "video/3gpp";
+
+ case FOURCC('a', 'v', 'c', '1'):
+ return "video/avc";
+
+ default:
+ assert(!"should not be here.");
+ return NULL;
+ }
+}
+
// Takes ownership of "source". Parsing is deferred until readMetaData()
// is triggered by the first countTracks/getTrack/getTrackMetaData call.
MPEG4Extractor::MPEG4Extractor(DataSource *source)
    : mDataSource(source),
      mHaveMetadata(false),
      mFirstTrack(NULL),
      mLastTrack(NULL) {
}
+
+MPEG4Extractor::~MPEG4Extractor() {
+ Track *track = mFirstTrack;
+ while (track) {
+ Track *next = track->next;
+
+ delete track->sampleTable;
+ track->sampleTable = NULL;
+
+ delete track;
+ track = next;
+ }
+ mFirstTrack = mLastTrack = NULL;
+
+ delete mDataSource;
+ mDataSource = NULL;
+}
+
+status_t MPEG4Extractor::countTracks(int *num_tracks) {
+ status_t err;
+ if ((err = readMetaData()) != OK) {
+ return err;
+ }
+
+ *num_tracks = 0;
+ Track *track = mFirstTrack;
+ while (track) {
+ ++*num_tracks;
+ track = track->next;
+ }
+
+ return OK;
+}
+
+sp<MetaData> MPEG4Extractor::getTrackMetaData(int index) {
+ if (index < 0) {
+ return NULL;
+ }
+
+ status_t err;
+ if ((err = readMetaData()) != OK) {
+ return NULL;
+ }
+
+ Track *track = mFirstTrack;
+ while (index > 0) {
+ if (track == NULL) {
+ return NULL;
+ }
+
+ track = track->next;
+ --index;
+ }
+
+ return track->meta;
+}
+
+status_t MPEG4Extractor::readMetaData() {
+ if (mHaveMetadata) {
+ return OK;
+ }
+
+ off_t offset = 0;
+ status_t err;
+ while ((err = parseChunk(&offset, 0)) == OK) {
+ }
+
+ if (mHaveMetadata) {
+ return OK;
+ }
+
+ return err;
+}
+
// Decodes a big-endian fourcc value into a NUL-terminated 4-character
// string; "s" must have room for 5 bytes.
static void MakeFourCCString(uint32_t x, char *s) {
    for (int i = 0; i < 4; ++i) {
        s[i] = (char)((x >> (8 * (3 - i))) & 0xff);
    }
    s[4] = '\0';
}
+
// Parses the single box starting at *offset, recursing into container
// boxes. Advances *offset past the box on success. Returns UNKNOWN_ERROR
// as a sentinel once the complete 'moov' box has been consumed (with
// mHaveMetadata set), so the caller's parse loop terminates.
status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
    // Box header: 32-bit size followed by 32-bit type, both big-endian.
    uint32_t hdr[2];
    if (mDataSource->read_at(*offset, hdr, 8) < 8) {
        return ERROR_IO;
    }
    uint64_t chunk_size = ntohl(hdr[0]);
    uint32_t chunk_type = ntohl(hdr[1]);
    off_t data_offset = *offset + 8;

    // size == 1 means an extended 64-bit size field follows the type.
    if (chunk_size == 1) {
        if (mDataSource->read_at(*offset + 8, &chunk_size, 8) < 8) {
            return ERROR_IO;
        }
        chunk_size = ntoh64(chunk_size);
        data_offset += 8;
    }

    char chunk[5];
    MakeFourCCString(chunk_type, chunk);

#if 0
    static const char kWhitespace[] = " ";
    const char *indent = &kWhitespace[sizeof(kWhitespace) - 1 - 2 * depth];
    printf("%sfound chunk '%s' of size %lld\n", indent, chunk, chunk_size);

    char buffer[256];
    if (chunk_size <= sizeof(buffer)) {
        if (mDataSource->read_at(*offset, buffer, chunk_size) < chunk_size) {
            return ERROR_IO;
        }

        hexdump(buffer, chunk_size);
    }
#endif

    // Size of the payload, i.e. the box minus its (possibly extended) header.
    off_t chunk_data_size = *offset + chunk_size - data_offset;

    switch(chunk_type) {
        // Pure container boxes: recurse over their children.
        case FOURCC('m', 'o', 'o', 'v'):
        case FOURCC('t', 'r', 'a', 'k'):
        case FOURCC('m', 'd', 'i', 'a'):
        case FOURCC('m', 'i', 'n', 'f'):
        case FOURCC('d', 'i', 'n', 'f'):
        case FOURCC('s', 't', 'b', 'l'):
        case FOURCC('m', 'v', 'e', 'x'):
        case FOURCC('m', 'o', 'o', 'f'):
        case FOURCC('t', 'r', 'a', 'f'):
        case FOURCC('m', 'f', 'r', 'a'):
        case FOURCC('s', 'k', 'i' ,'p'):
        {
            off_t stop_offset = *offset + chunk_size;
            *offset = data_offset;
            while (*offset < stop_offset) {
                status_t err = parseChunk(offset, depth + 1);
                if (err != OK) {
                    return err;
                }
            }
            assert(*offset == stop_offset);

            if (chunk_type == FOURCC('m', 'o', 'o', 'v')) {
                mHaveMetadata = true;

                return UNKNOWN_ERROR;  // Return a dummy error.
            }
            break;
        }

        // Track header: starts a new Track entry in the linked list.
        case FOURCC('t', 'k', 'h', 'd'):
        {
            assert(chunk_data_size >= 4);

            uint8_t version;
            if (mDataSource->read_at(data_offset, &version, 1) < 1) {
                return ERROR_IO;
            }

            uint64_t ctime, mtime, duration;
            int32_t id;
            uint32_t width, height;

            // Version 1 uses 64-bit times/duration, version 0 uses 32-bit.
            // NOTE(review): these parsed values are currently unused (and
            // stay uninitialized for any other version byte) -- confirm
            // whether they should be stored on the track.
            if (version == 1) {
                if (chunk_data_size != 36 + 60) {
                    return ERROR_MALFORMED;
                }

                uint8_t buffer[36 + 60];
                if (mDataSource->read_at(
                            data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) {
                    return ERROR_IO;
                }

                ctime = U64_AT(&buffer[4]);
                mtime = U64_AT(&buffer[12]);
                id = U32_AT(&buffer[20]);
                duration = U64_AT(&buffer[28]);
                width = U32_AT(&buffer[88]);
                height = U32_AT(&buffer[92]);
            } else if (version == 0) {
                if (chunk_data_size != 24 + 60) {
                    return ERROR_MALFORMED;
                }

                uint8_t buffer[24 + 60];
                if (mDataSource->read_at(
                            data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) {
                    return ERROR_IO;
                }
                ctime = U32_AT(&buffer[4]);
                mtime = U32_AT(&buffer[8]);
                id = U32_AT(&buffer[12]);
                duration = U32_AT(&buffer[20]);
                width = U32_AT(&buffer[76]);
                height = U32_AT(&buffer[80]);
            }

            // Append a fresh track; later boxes fill in its details via
            // mLastTrack.
            Track *track = new Track;
            track->next = NULL;
            if (mLastTrack) {
                mLastTrack->next = track;
            } else {
                mFirstTrack = track;
            }
            mLastTrack = track;

            track->meta = new MetaData;
            track->timescale = 0;
            track->sampleTable = new SampleTable(mDataSource);
            track->meta->setCString(kKeyMIMEType, "application/octet-stream");

            *offset += chunk_size;
            break;
        }

        // Media header: timescale and duration for the current track.
        case FOURCC('m', 'd', 'h', 'd'):
        {
            if (chunk_data_size < 4) {
                return ERROR_MALFORMED;
            }

            uint8_t version;
            if (mDataSource->read_at(
                        data_offset, &version, sizeof(version))
                    < (ssize_t)sizeof(version)) {
                return ERROR_IO;
            }

            off_t timescale_offset;

            if (version == 1) {
                timescale_offset = data_offset + 4 + 16;
            } else if (version == 0) {
                timescale_offset = data_offset + 4 + 8;
            } else {
                return ERROR_IO;
            }

            uint32_t timescale;
            if (mDataSource->read_at(
                        timescale_offset, &timescale, sizeof(timescale))
                    < (ssize_t)sizeof(timescale)) {
                return ERROR_IO;
            }

            mLastTrack->timescale = ntohl(timescale);
            mLastTrack->meta->setInt32(kKeyTimeScale, mLastTrack->timescale);

            // Duration immediately follows the timescale; 64-bit in
            // version 1, 32-bit in version 0.
            int64_t duration;
            if (version == 1) {
                if (mDataSource->read_at(
                            timescale_offset + 4, &duration, sizeof(duration))
                        < (ssize_t)sizeof(duration)) {
                    return ERROR_IO;
                }
                duration = ntoh64(duration);
            } else {
                int32_t duration32;
                if (mDataSource->read_at(
                            timescale_offset + 4, &duration32, sizeof(duration32))
                        < (ssize_t)sizeof(duration32)) {
                    return ERROR_IO;
                }
                duration = ntohl(duration32);
            }
            mLastTrack->meta->setInt32(kKeyDuration, duration);

            *offset += chunk_size;
            break;
        }

        // Handler box: tells us whether this track is audio or video.
        case FOURCC('h', 'd', 'l', 'r'):
        {
            if (chunk_data_size < 25) {
                return ERROR_MALFORMED;
            }

            uint8_t buffer[24];
            if (mDataSource->read_at(data_offset, buffer, 24) < 24) {
                return ERROR_IO;
            }

            if (U32_AT(buffer) != 0) {
                // Should be version 0, flags 0.
                return ERROR_MALFORMED;
            }

            if (U32_AT(&buffer[4]) != 0) {
                // pre_defined should be 0.
                return ERROR_MALFORMED;
            }

            mHandlerType = U32_AT(&buffer[8]);
            *offset += chunk_size;
            break;
        }

        // Sample description: contains the codec-specific sample entries.
        case FOURCC('s', 't', 's', 'd'):
        {
            if (chunk_data_size < 8) {
                return ERROR_MALFORMED;
            }

            uint8_t buffer[8];
            assert(chunk_data_size >= (off_t)sizeof(buffer));
            if (mDataSource->read_at(
                        data_offset, buffer, 8) < 8) {
                return ERROR_IO;
            }

            if (U32_AT(buffer) != 0) {
                // Should be version 0, flags 0.
                return ERROR_MALFORMED;
            }

            uint32_t entry_count = U32_AT(&buffer[4]);

            if (entry_count > 1) {
                // For now we only support a single type of media per track.
                return ERROR_UNSUPPORTED;
            }

            off_t stop_offset = *offset + chunk_size;
            *offset = data_offset + 8;
            for (uint32_t i = 0; i < entry_count; ++i) {
                status_t err = parseChunk(offset, depth + 1);
                if (err != OK) {
                    return err;
                }
            }
            assert(*offset == stop_offset);
            break;
        }

        // Audio sample entries.
        case FOURCC('m', 'p', '4', 'a'):
        case FOURCC('s', 'a', 'm', 'r'):
        {
            if (mHandlerType != FOURCC('s', 'o', 'u', 'n')) {
                return ERROR_MALFORMED;
            }

            uint8_t buffer[8 + 20];
            if (chunk_data_size < (ssize_t)sizeof(buffer)) {
                // Basic AudioSampleEntry size.
                return ERROR_MALFORMED;
            }

            if (mDataSource->read_at(
                        data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) {
                return ERROR_IO;
            }

            uint16_t data_ref_index = U16_AT(&buffer[6]);
            uint16_t num_channels = U16_AT(&buffer[16]);

            if (!strcasecmp("audio/3gpp", FourCC2MIME(chunk_type))) {
                // AMR audio is always mono.
                num_channels = 1;
            }

            uint16_t sample_size = U16_AT(&buffer[18]);
            // Sample rate is a 16.16 fixed-point value; keep the integer part.
            uint32_t sample_rate = U32_AT(&buffer[24]) >> 16;

            printf("*** coding='%s' %d channels, size %d, rate %d\n",
                   chunk, num_channels, sample_size, sample_rate);

            mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
            mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
            mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);

            // Child boxes (e.g. 'esds') follow the fixed-size entry.
            off_t stop_offset = *offset + chunk_size;
            *offset = data_offset + sizeof(buffer);
            while (*offset < stop_offset) {
                status_t err = parseChunk(offset, depth + 1);
                if (err != OK) {
                    return err;
                }
            }
            assert(*offset == stop_offset);
            break;
        }

        // Video sample entries.
        case FOURCC('m', 'p', '4', 'v'):
        case FOURCC('s', '2', '6', '3'):
        case FOURCC('a', 'v', 'c', '1'):
        {
            if (mHandlerType != FOURCC('v', 'i', 'd', 'e')) {
                return ERROR_MALFORMED;
            }

            uint8_t buffer[78];
            if (chunk_data_size < (ssize_t)sizeof(buffer)) {
                // Basic VideoSampleEntry size.
                return ERROR_MALFORMED;
            }

            if (mDataSource->read_at(
                        data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) {
                return ERROR_IO;
            }

            uint16_t data_ref_index = U16_AT(&buffer[6]);
            uint16_t width = U16_AT(&buffer[6 + 18]);
            uint16_t height = U16_AT(&buffer[6 + 20]);

            printf("*** coding='%s' width=%d height=%d\n",
                   chunk, width, height);

            mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
            mLastTrack->meta->setInt32(kKeyWidth, width);
            mLastTrack->meta->setInt32(kKeyHeight, height);

            // Child boxes (e.g. 'esds', 'avcC') follow the fixed-size entry.
            off_t stop_offset = *offset + chunk_size;
            *offset = data_offset + sizeof(buffer);
            while (*offset < stop_offset) {
                status_t err = parseChunk(offset, depth + 1);
                if (err != OK) {
                    return err;
                }
            }
            assert(*offset == stop_offset);
            break;
        }

        // The various sample-table sub-boxes are handed to SampleTable.
        case FOURCC('s', 't', 'c', 'o'):
        case FOURCC('c', 'o', '6', '4'):
        {
            status_t err =
                mLastTrack->sampleTable->setChunkOffsetParams(
                        chunk_type, data_offset, chunk_data_size);

            if (err != OK) {
                return err;
            }

            *offset += chunk_size;
            break;
        }

        case FOURCC('s', 't', 's', 'c'):
        {
            status_t err =
                mLastTrack->sampleTable->setSampleToChunkParams(
                        data_offset, chunk_data_size);

            if (err != OK) {
                return err;
            }

            *offset += chunk_size;
            break;
        }

        case FOURCC('s', 't', 's', 'z'):
        case FOURCC('s', 't', 'z', '2'):
        {
            status_t err =
                mLastTrack->sampleTable->setSampleSizeParams(
                        chunk_type, data_offset, chunk_data_size);

            if (err != OK) {
                return err;
            }

            *offset += chunk_size;
            break;
        }

        case FOURCC('s', 't', 't', 's'):
        {
            status_t err =
                mLastTrack->sampleTable->setTimeToSampleParams(
                        data_offset, chunk_data_size);

            if (err != OK) {
                return err;
            }

            *offset += chunk_size;
            break;
        }

        case FOURCC('s', 't', 's', 's'):
        {
            status_t err =
                mLastTrack->sampleTable->setSyncSampleParams(
                        data_offset, chunk_data_size);

            if (err != OK) {
                return err;
            }

            *offset += chunk_size;
            break;
        }

        // Elementary stream descriptor: stored verbatim as codec config.
        case FOURCC('e', 's', 'd', 's'):
        {
            if (chunk_data_size < 4) {
                return ERROR_MALFORMED;
            }

            uint8_t buffer[256];
            if (chunk_data_size > (off_t)sizeof(buffer)) {
                return ERROR_BUFFER_TOO_SMALL;
            }

            if (mDataSource->read_at(
                        data_offset, buffer, chunk_data_size) < chunk_data_size) {
                return ERROR_IO;
            }

            if (U32_AT(buffer) != 0) {
                // Should be version 0, flags 0.
                return ERROR_MALFORMED;
            }

            mLastTrack->meta->setData(
                    kKeyESDS, kTypeESDS, &buffer[4], chunk_data_size - 4);

            *offset += chunk_size;
            break;
        }

        // AVC decoder configuration record: stored verbatim.
        case FOURCC('a', 'v', 'c', 'C'):
        {
            char buffer[256];
            if (chunk_data_size > (off_t)sizeof(buffer)) {
                return ERROR_BUFFER_TOO_SMALL;
            }

            if (mDataSource->read_at(
                        data_offset, buffer, chunk_data_size) < chunk_data_size) {
                return ERROR_IO;
            }

            mLastTrack->meta->setData(
                    kKeyAVCC, kTypeAVCC, buffer, chunk_data_size);

            *offset += chunk_size;
            break;
        }

        default:
        {
            // Unknown box: skip it entirely.
            *offset += chunk_size;
            break;
        }
    }

    return OK;
}
+
+status_t MPEG4Extractor::getTrack(int index, MediaSource **source) {
+ *source = NULL;
+
+ if (index < 0) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ status_t err;
+ if ((err = readMetaData()) != OK) {
+ return err;
+ }
+
+ Track *track = mFirstTrack;
+ while (index > 0) {
+ if (track == NULL) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ track = track->next;
+ --index;
+ }
+
+ *source = new MPEG4Source(
+ track->meta, mDataSource, track->sampleTable);
+
+ return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
// Caches the mime type and timescale from the track format. Both keys
// must be present (the extractor always sets them); asserts otherwise.
MPEG4Source::MPEG4Source(
        const sp<MetaData> &format,
        DataSource *dataSource, SampleTable *sampleTable)
    : mFormat(format),
      mDataSource(dataSource),
      mTimescale(0),
      mSampleTable(sampleTable),
      mCurrentSampleIndex(0),
      mIsAVC(false),
      mStarted(false),
      mGroup(NULL),
      mBuffer(NULL),
      mBufferOffset(0),
      mBufferSizeRemaining(0),
      mNeedsNALFraming(false) {
    const char *mime;
    bool success = mFormat->findCString(kKeyMIMEType, &mime);
    assert(success);

    success = mFormat->findInt32(kKeyTimeScale, &mTimescale);
    assert(success);

    // AVC samples need per-NAL handling in read().
    mIsAVC = !strcasecmp(mime, "video/avc");
}
+
// Ensures buffers are released even if the client forgot to stop().
MPEG4Source::~MPEG4Source() {
    if (mStarted) {
        stop();
    }
}
+
+status_t MPEG4Source::start(MetaData *params) {
+ assert(!mStarted);
+
+ int32_t val;
+ if (mIsAVC && params && params->findInt32(kKeyNeedsNALFraming, &val)
+ && val != 0) {
+ mNeedsNALFraming = true;
+ } else {
+ mNeedsNALFraming = false;
+ }
+
+ mGroup = new MediaBufferGroup;
+
+ size_t max_size;
+ status_t err = mSampleTable->getMaxSampleSize(&max_size);
+ assert(err == OK);
+
+ // Add padding for de-framing of AVC content just in case.
+ mGroup->add_buffer(new MediaBuffer(max_size + 2));
+
+ mStarted = true;
+
+ return OK;
+}
+
+status_t MPEG4Source::stop() {
+ assert(mStarted);
+
+ if (mBuffer != NULL) {
+ mBuffer->release();
+ mBuffer = NULL;
+ }
+
+ delete mGroup;
+ mGroup = NULL;
+
+ mStarted = false;
+ mCurrentSampleIndex = 0;
+
+ return OK;
+}
+
// Returns the track format supplied by the extractor.
sp<MetaData> MPEG4Source::getFormat() {
    return mFormat;
}
+
+status_t MPEG4Source::read(
+ MediaBuffer **out, const ReadOptions *options) {
+ assert(mStarted);
+
+ *out = NULL;
+
+ int64_t seekTimeUs;
+ if (options && options->getSeekTo(&seekTimeUs)) {
+ uint32_t sampleIndex;
+ status_t err = mSampleTable->findClosestSample(
+ seekTimeUs * mTimescale / 1000000,
+ &sampleIndex, SampleTable::kSyncSample_Flag);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mCurrentSampleIndex = sampleIndex;
+ if (mBuffer != NULL) {
+ mBuffer->release();
+ mBuffer = NULL;
+ }
+
+ // fall through
+ }
+
+ if (mBuffer == NULL) {
+ off_t offset;
+ size_t size;
+ status_t err = mSampleTable->getSampleOffsetAndSize(
+ mCurrentSampleIndex, &offset, &size);
+
+ if (err != OK) {
+ return err;
+ }
+
+ uint32_t dts;
+ err = mSampleTable->getDecodingTime(mCurrentSampleIndex, &dts);
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = mGroup->acquire_buffer(&mBuffer);
+ if (err != OK) {
+ assert(mBuffer == NULL);
+ return err;
+ }
+
+ assert(mBuffer->size() + 2 >= size);
+
+ ssize_t num_bytes_read =
+ mDataSource->read_at(offset, (uint8_t *)mBuffer->data() + 2, size);
+
+ if (num_bytes_read < (ssize_t)size) {
+ mBuffer->release();
+ mBuffer = NULL;
+
+ return err;
+ }
+
+ mBuffer->set_range(2, size);
+ mBuffer->meta_data()->clear();
+ mBuffer->meta_data()->setInt32(kKeyTimeUnits, dts);
+ mBuffer->meta_data()->setInt32(kKeyTimeScale, mTimescale);
+
+ ++mCurrentSampleIndex;
+
+ mBufferOffset = 2;
+ mBufferSizeRemaining = size;
+ }
+
+ if (!mIsAVC) {
+ *out = mBuffer;
+ mBuffer = NULL;
+
+ return OK;
+ }
+
+ uint8_t *data = (uint8_t *)mBuffer->data() + mBufferOffset;
+ assert(mBufferSizeRemaining >= 2);
+
+ size_t nal_length = (data[0] << 8) | data[1];
+ assert(mBufferSizeRemaining >= 2 + nal_length);
+
+ if (mNeedsNALFraming) {
+ // Insert marker.
+ data[-2] = data[-1] = data[0] = 0;
+ data[1] = 1;
+
+ mBuffer->set_range(mBufferOffset - 2, nal_length + 4);
+ } else {
+ mBuffer->set_range(mBufferOffset + 2, nal_length);
+ }
+
+ mBufferOffset += nal_length + 2;
+ mBufferSizeRemaining -= nal_length + 2;
+
+ if (mBufferSizeRemaining > 0) {
+ *out = mBuffer->clone();
+ } else {
+ *out = mBuffer;
+ mBuffer = NULL;
+ }
+
+ return OK;
+}
+
+bool SniffMPEG4(DataSource *source, String8 *mimeType, float *confidence) {
+ uint8_t header[8];
+
+ ssize_t n = source->read_at(4, header, sizeof(header));
+ if (n < (ssize_t)sizeof(header)) {
+ return false;
+ }
+
+ if (!memcmp(header, "ftyp3gp", 7) || !memcmp(header, "ftypmp42", 8)
+ || !memcmp(header, "ftypisom", 8) || !memcmp(header, "ftypM4V ", 8)) {
+ *mimeType = "video/mp4";
+ *confidence = 0.1;
+
+ return true;
+ }
+
+ return false;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
new file mode 100644
index 0000000..6bdf282
--- /dev/null
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -0,0 +1,641 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arpa/inet.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <ctype.h>
+#include <pthread.h>
+
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+// Per-track state for the writer.  Each Track drains encoded samples from
+// its MediaSource on a private thread, appends them to the shared 'mdat'
+// chunk through the owning MPEG4Writer, and records per-sample bookkeeping
+// used to emit the 'moov' sample tables when the writer stops.
+class MPEG4Writer::Track {
+public:
+    Track(MPEG4Writer *owner, const sp<MetaData> &meta, MediaSource *source);
+    ~Track();
+
+    // Starts the source and spawns the writer thread.
+    void start();
+    // Signals the thread to finish, joins it and stops the source.
+    void stop();
+
+    int64_t getDuration() const;
+
+    // Emits this track's complete 'trak' box into the owner's output.
+    void writeTrackHeader(int32_t trackID);
+
+private:
+    MPEG4Writer *mOwner;   // back-pointer, not owned
+    sp<MetaData> mMeta;
+    // NOTE(review): mSource is never deleted by Track -- confirm who owns
+    // the MediaSource passed to MPEG4Writer::addSource().
+    MediaSource *mSource;
+    volatile bool mDone;   // set by stop() to make threadEntry() exit
+
+    pthread_t mThread;
+
+    // File offset, byte size and timestamp (milliseconds) of one sample.
+    struct SampleInfo {
+        size_t size;
+        off_t offset;
+        int64_t timestamp;
+    };
+    List<SampleInfo> mSampleInfos;
+
+    // Decoder configuration stripped from the first MPEG4 sample
+    // (everything before the first VOP start code); malloc'd, written
+    // later into the 'esds' box.
+    void *mCodecSpecificData;
+    size_t mCodecSpecificDataSize;
+
+    static void *ThreadWrapper(void *me);
+    void threadEntry();
+
+    Track(const Track &);
+    Track &operator=(const Track &);
+};
+
+// Opens the output file for writing.  NOTE(review): fopen() failure is
+// only caught by the assert (NDEBUG is undefined above, so this aborts
+// instead of reporting an error to the caller) -- confirm intended.
+MPEG4Writer::MPEG4Writer(const char *filename)
+    : mFile(fopen(filename, "wb")),
+      mOffset(0),
+      mMdatOffset(0) {
+    assert(mFile != NULL);
+}
+
+// Finalizes the file (via stop()) and destroys all tracks.
+MPEG4Writer::~MPEG4Writer() {
+    stop();
+
+    for (List<Track *>::iterator it = mTracks.begin();
+         it != mTracks.end(); ++it) {
+        delete *it;
+    }
+    mTracks.clear();
+}
+
+// Registers a new track; no samples are pulled until start().
+void MPEG4Writer::addSource(const sp<MetaData> &meta, MediaSource *source) {
+    Track *track = new Track(this, meta, source);
+    mTracks.push_back(track);
+}
+
+// Writes the 'ftyp' header, opens the (64-bit sized) 'mdat' chunk and
+// starts every track's writer thread.
+void MPEG4Writer::start() {
+    if (mFile == NULL) {
+        return;
+    }
+
+    beginBox("ftyp");
+    writeFourcc("isom");
+    writeInt32(0);
+    writeFourcc("isom");
+    endBox();
+
+    // 'mdat' with size field 1 means the real 64-bit size follows the
+    // fourcc; the eight '?' placeholder bytes are patched in stop().
+    mMdatOffset = mOffset;
+    write("\x00\x00\x00\x01mdat????????", 16);
+
+    for (List<Track *>::iterator it = mTracks.begin();
+         it != mTracks.end(); ++it) {
+        (*it)->start();
+    }
+}
+
+// Stops all tracks, patches the 'mdat' size, emits the 'moov' tables and
+// closes the file.  Safe to call repeatedly: mFile goes NULL afterwards.
+void MPEG4Writer::stop() {
+    if (mFile == NULL) {
+        return;
+    }
+
+    int64_t max_duration = 0;
+    for (List<Track *>::iterator it = mTracks.begin();
+         it != mTracks.end(); ++it) {
+        (*it)->stop();
+
+        int64_t duration = (*it)->getDuration();
+        if (duration > max_duration) {
+            max_duration = duration;
+        }
+    }
+
+    // Fix up the size of the 'mdat' chunk.  The 64-bit size sits 8 bytes
+    // into the box (after the 32-bit "1" marker and the fourcc).
+    fseek(mFile, mMdatOffset + 8, SEEK_SET);
+    int64_t size = mOffset - mMdatOffset;
+    size = hton64(size);
+    fwrite(&size, 1, 8, mFile);
+    fseek(mFile, mOffset, SEEK_SET);
+
+    time_t now = time(NULL);
+
+    beginBox("moov");
+
+    beginBox("mvhd");
+    writeInt32(0);           // version=0, flags=0
+    writeInt32(now);         // creation time
+    writeInt32(now);         // modification time
+    writeInt32(1000);        // timescale (milliseconds)
+    writeInt32(max_duration);
+    writeInt32(0x10000);     // rate 1.0 (16.16 fixed point)
+    writeInt16(0x100);       // volume 1.0 (8.8 fixed point)
+    writeInt16(0);           // reserved
+    writeInt32(0);           // reserved
+    writeInt32(0);           // reserved
+    writeInt32(0x10000);     // identity matrix
+    writeInt32(0);
+    writeInt32(0);
+    writeInt32(0);
+    writeInt32(0x10000);
+    writeInt32(0);
+    writeInt32(0);
+    writeInt32(0);
+    writeInt32(0x40000000);
+    writeInt32(0);           // predefined
+    writeInt32(0);           // predefined
+    writeInt32(0);           // predefined
+    writeInt32(0);           // predefined
+    writeInt32(0);           // predefined
+    writeInt32(0);           // predefined
+    writeInt32(mTracks.size() + 1);  // nextTrackID
+    endBox();                // mvhd
+
+    int32_t id = 1;
+    for (List<Track *>::iterator it = mTracks.begin();
+         it != mTracks.end(); ++it, ++id) {
+        (*it)->writeTrackHeader(id);
+    }
+    endBox();                // moov
+
+    assert(mBoxes.empty());
+
+    fclose(mFile);
+    mFile = NULL;
+}
+
+// Appends the buffer's valid range to the file and returns the offset it
+// was written at.  Called concurrently by track threads; serialized by
+// mLock.  NOTE(review): the fwrite() result is unchecked, so a full disk
+// goes unnoticed here.
+off_t MPEG4Writer::addSample(MediaBuffer *buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    off_t old_offset = mOffset;
+
+    fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
+           1, buffer->range_length(), mFile);
+
+    mOffset += buffer->range_length();
+
+    return old_offset;
+}
+
+// Opens a box: remembers where its 32-bit size field belongs and writes a
+// placeholder size plus the fourcc.  Boxes nest via the mBoxes stack.
+void MPEG4Writer::beginBox(const char *fourcc) {
+    assert(strlen(fourcc) == 4);
+
+    mBoxes.push_back(mOffset);
+
+    writeInt32(0);
+    writeFourcc(fourcc);
+}
+
+// Closes the innermost open box by seeking back and patching its size.
+void MPEG4Writer::endBox() {
+    assert(!mBoxes.empty());
+
+    off_t offset = *--mBoxes.end();
+    mBoxes.erase(--mBoxes.end());
+
+    fseek(mFile, offset, SEEK_SET);
+    writeInt32(mOffset - offset);
+    // writeInt32 bumped mOffset by 4 but we only overwrote old bytes;
+    // undo the bump and return to the real end of the file.
+    mOffset -= 4;
+    fseek(mFile, mOffset, SEEK_SET);
+}
+
+// The writeIntN helpers emit big-endian (network byte order) values, as
+// the ISO base media file format requires.
+void MPEG4Writer::writeInt8(int8_t x) {
+    fwrite(&x, 1, 1, mFile);
+    ++mOffset;
+}
+
+void MPEG4Writer::writeInt16(int16_t x) {
+    x = htons(x);
+    fwrite(&x, 1, 2, mFile);
+    mOffset += 2;
+}
+
+void MPEG4Writer::writeInt32(int32_t x) {
+    x = htonl(x);
+    fwrite(&x, 1, 4, mFile);
+    mOffset += 4;
+}
+
+void MPEG4Writer::writeInt64(int64_t x) {
+    x = hton64(x);
+    fwrite(&x, 1, 8, mFile);
+    mOffset += 8;
+}
+
+// Writes the string INCLUDING its NUL terminator.
+void MPEG4Writer::writeCString(const char *s) {
+    size_t n = strlen(s);
+
+    fwrite(s, 1, n + 1, mFile);
+    mOffset += n + 1;
+}
+
+void MPEG4Writer::writeFourcc(const char *s) {
+    assert(strlen(s) == 4);
+    fwrite(s, 1, 4, mFile);
+    mOffset += 4;
+}
+
+void MPEG4Writer::write(const void *data, size_t size) {
+    fwrite(data, 1, size, mFile);
+    mOffset += size;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG4Writer::Track::Track(
+        MPEG4Writer *owner, const sp<MetaData> &meta, MediaSource *source)
+    : mOwner(owner),
+      mMeta(meta),
+      mSource(source),
+      mDone(false),
+      mCodecSpecificData(NULL),
+      mCodecSpecificDataSize(0) {
+}
+
+MPEG4Writer::Track::~Track() {
+    stop();
+
+    if (mCodecSpecificData != NULL) {
+        free(mCodecSpecificData);
+        mCodecSpecificData = NULL;
+    }
+}
+
+// Starts the media source, then the thread that drains it.
+void MPEG4Writer::Track::start() {
+    mSource->start();
+
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+    mDone = false;
+
+    int err = pthread_create(&mThread, &attr, ThreadWrapper, this);
+    assert(err == 0);
+
+    pthread_attr_destroy(&attr);
+}
+
+// Asks the writer thread to finish, joins it, then stops the source.
+// NOTE(review): if start() was never called, mDone is still false and
+// mThread is uninitialized, so pthread_join() here is undefined behavior
+// -- confirm every constructed Track is started, or add a 'started' flag.
+void MPEG4Writer::Track::stop() {
+    if (mDone) {
+        return;
+    }
+
+    mDone = true;
+
+    void *dummy;
+    pthread_join(mThread, &dummy);
+
+    mSource->stop();
+}
+
+// static
+// Bounces from the C-style pthread entry point into the member function.
+void *MPEG4Writer::Track::ThreadWrapper(void *me) {
+    static_cast<Track *>(me)->threadEntry();
+
+    return NULL;
+}
+
+// Writer-thread body: pulls encoded buffers from the source until stop()
+// or end-of-stream, peels decoder config off the first MPEG4 sample,
+// appends payloads to the file and records offset/size/timestamp per
+// sample for the 'moov' tables.
+void MPEG4Writer::Track::threadEntry() {
+    bool is_mpeg4 = false;
+    sp<MetaData> meta = mSource->getFormat();
+    const char *mime;
+    meta->findCString(kKeyMIMEType, &mime);
+    is_mpeg4 = !strcasecmp(mime, "video/mp4v-es");
+
+    MediaBuffer *buffer;
+    while (!mDone && mSource->read(&buffer) == OK) {
+        if (buffer->range_length() == 0) {
+            buffer->release();
+            buffer = NULL;
+
+            continue;
+        }
+
+        if (mCodecSpecificData == NULL && is_mpeg4) {
+            // Everything before the first VOP start code (00 00 01 b6) is
+            // decoder configuration (VOS/VO/VOL headers): stash it for the
+            // 'esds' box and strip it from this sample.
+            const uint8_t *data =
+                (const uint8_t *)buffer->data() + buffer->range_offset();
+
+            const size_t size = buffer->range_length();
+
+            size_t offset = 0;
+            while (offset + 3 < size) {
+                if (data[offset] == 0x00 && data[offset + 1] == 0x00
+                        && data[offset + 2] == 0x01
+                        && data[offset + 3] == 0xb6) {
+                    break;
+                }
+
+                ++offset;
+            }
+
+            // A VOP start code must have been found above.
+            assert(offset + 3 < size);
+
+            mCodecSpecificDataSize = offset;
+            mCodecSpecificData = malloc(offset);
+            memcpy(mCodecSpecificData, data, offset);
+
+            buffer->set_range(buffer->range_offset() + offset, size - offset);
+        }
+
+        off_t offset = mOwner->addSample(buffer);
+
+        SampleInfo info;
+        info.size = buffer->range_length();
+        info.offset = offset;
+
+        int32_t units, scale;
+        bool success =
+            buffer->meta_data()->findInt32(kKeyTimeUnits, &units);
+        assert(success);
+        success =
+            buffer->meta_data()->findInt32(kKeyTimeScale, &scale);
+        assert(success);
+
+        // Normalize to milliseconds, matching the 1000 Hz moov timescale.
+        info.timestamp = (int64_t)units * 1000 / scale;
+
+        mSampleInfos.push_back(info);
+
+        buffer->release();
+        buffer = NULL;
+    }
+}
+
+int64_t MPEG4Writer::Track::getDuration() const {
+    return 10000; // XXX hard-coded 10 s placeholder
+}
+
+// Emits this track's complete 'trak' box: track header, media header and
+// sample tables.  Must run between the owner's beginBox("moov") and the
+// matching endBox().
+//
+// Fixes relative to the first draft:
+//  * 'stbl' is now nested inside 'minf' as ISO/IEC 14496-12 requires
+//    (it used to be written as a sibling of minf inside mdia).
+//  * An empty sample list no longer dereferences begin() in the 'stts'
+//    writer nor underflows the entry count (size() - 1 on an empty list).
+//  * Dropped a leftover '++n' in the co64 loop ('n' is unused there).
+void MPEG4Writer::Track::writeTrackHeader(int32_t trackID) {
+    const char *mime;
+    bool success = mMeta->findCString(kKeyMIMEType, &mime);
+    assert(success);
+
+    bool is_audio = !strncasecmp(mime, "audio/", 6);
+
+    time_t now = time(NULL);
+
+    mOwner->beginBox("trak");
+
+    mOwner->beginBox("tkhd");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(now);  // creation time
+    mOwner->writeInt32(now);  // modification time
+    mOwner->writeInt32(trackID);
+    mOwner->writeInt32(0);  // reserved
+    mOwner->writeInt32(getDuration());
+    mOwner->writeInt32(0);  // reserved
+    mOwner->writeInt32(0);  // reserved
+    mOwner->writeInt16(0);  // layer
+    mOwner->writeInt16(0);  // alternate group
+    mOwner->writeInt16(is_audio ? 0x100 : 0);  // volume
+    mOwner->writeInt16(0);  // reserved
+
+    mOwner->writeInt32(0x10000);  // identity matrix
+    mOwner->writeInt32(0);
+    mOwner->writeInt32(0);
+    mOwner->writeInt32(0);
+    mOwner->writeInt32(0x10000);
+    mOwner->writeInt32(0);
+    mOwner->writeInt32(0);
+    mOwner->writeInt32(0);
+    mOwner->writeInt32(0x40000000);
+
+    if (is_audio) {
+        mOwner->writeInt32(0);
+        mOwner->writeInt32(0);
+    } else {
+        int32_t width, height;
+        bool success = mMeta->findInt32(kKeyWidth, &width);
+        success = success && mMeta->findInt32(kKeyHeight, &height);
+        assert(success);
+
+        mOwner->writeInt32(width);
+        mOwner->writeInt32(height);
+    }
+    mOwner->endBox();  // tkhd
+
+    mOwner->beginBox("mdia");
+
+    mOwner->beginBox("mdhd");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(now);  // creation time
+    mOwner->writeInt32(now);  // modification time
+    mOwner->writeInt32(1000);  // timescale (milliseconds)
+    mOwner->writeInt32(getDuration());
+    mOwner->writeInt16(0);  // language code XXX
+    mOwner->writeInt16(0);  // predefined
+    mOwner->endBox();
+
+    mOwner->beginBox("hdlr");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(0);  // predefined
+    mOwner->writeFourcc(is_audio ? "soun" : "vide");
+    mOwner->writeInt32(0);  // reserved
+    mOwner->writeInt32(0);  // reserved
+    mOwner->writeInt32(0);  // reserved
+    mOwner->writeCString("");  // name
+    mOwner->endBox();
+
+    mOwner->beginBox("minf");
+
+    mOwner->beginBox("dinf");
+    mOwner->beginBox("dref");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(1);
+    mOwner->beginBox("url ");
+    mOwner->writeInt32(1);  // version=0, flags=1 (self-contained)
+    mOwner->endBox();  // url
+    mOwner->endBox();  // dref
+    mOwner->endBox();  // dinf
+
+    if (is_audio) {
+        mOwner->beginBox("smhd");
+        mOwner->writeInt32(0);  // version=0, flags=0
+        mOwner->writeInt16(0);  // balance
+        mOwner->writeInt16(0);  // reserved
+        mOwner->endBox();
+    } else {
+        mOwner->beginBox("vmhd");
+        mOwner->writeInt32(0x00000001);  // version=0, flags=1
+        mOwner->writeInt16(0);  // graphics mode
+        mOwner->writeInt16(0);  // opcolor
+        mOwner->writeInt16(0);
+        mOwner->writeInt16(0);
+        mOwner->endBox();
+    }
+
+    mOwner->beginBox("stbl");
+
+    mOwner->beginBox("stsd");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(1);  // entry count
+    if (is_audio) {
+        mOwner->beginBox("xxxx");  // audio format XXX
+        mOwner->writeInt32(0);  // reserved
+        mOwner->writeInt16(0);  // reserved
+        mOwner->writeInt16(0);  // data ref index
+        mOwner->writeInt32(0);  // reserved
+        mOwner->writeInt32(0);  // reserved
+        mOwner->writeInt16(2);  // channel count
+        mOwner->writeInt16(16);  // sample size
+        mOwner->writeInt16(0);  // predefined
+        mOwner->writeInt16(0);  // reserved
+
+        int32_t samplerate;
+        bool success = mMeta->findInt32(kKeySampleRate, &samplerate);
+        assert(success);
+
+        mOwner->writeInt32(samplerate << 16);  // 16.16 fixed point
+        mOwner->endBox();
+    } else {
+        if (!strcasecmp("video/mp4v-es", mime)) {
+            mOwner->beginBox("mp4v");
+        } else if (!strcasecmp("video/3gpp", mime)) {
+            mOwner->beginBox("s263");
+        } else {
+            assert(!"should not be here, unknown mime type.");
+        }
+
+        mOwner->writeInt32(0);  // reserved
+        mOwner->writeInt16(0);  // reserved
+        mOwner->writeInt16(0);  // data ref index
+        mOwner->writeInt16(0);  // predefined
+        mOwner->writeInt16(0);  // reserved
+        mOwner->writeInt32(0);  // predefined
+        mOwner->writeInt32(0);  // predefined
+        mOwner->writeInt32(0);  // predefined
+
+        int32_t width, height;
+        bool success = mMeta->findInt32(kKeyWidth, &width);
+        success = success && mMeta->findInt32(kKeyHeight, &height);
+        assert(success);
+
+        mOwner->writeInt16(width);
+        mOwner->writeInt16(height);
+        mOwner->writeInt32(0x480000);  // horiz resolution 72 dpi
+        mOwner->writeInt32(0x480000);  // vert resolution 72 dpi
+        mOwner->writeInt32(0);  // reserved
+        mOwner->writeInt16(1);  // frame count
+        mOwner->write("                                ", 32);  // compressorname: 32 spaces
+        mOwner->writeInt16(0x18);  // depth
+        mOwner->writeInt16(-1);  // predefined
+
+        assert(23 + mCodecSpecificDataSize < 128);
+
+        if (!strcasecmp("video/mp4v-es", mime)) {
+            mOwner->beginBox("esds");
+
+            mOwner->writeInt32(0);  // version=0, flags=0
+
+            mOwner->writeInt8(0x03);  // ES_DescrTag
+            mOwner->writeInt8(23 + mCodecSpecificDataSize);
+            mOwner->writeInt16(0x0000);  // ES_ID
+            mOwner->writeInt8(0x1f);
+
+            mOwner->writeInt8(0x04);  // DecoderConfigDescrTag
+            mOwner->writeInt8(15 + mCodecSpecificDataSize);
+            mOwner->writeInt8(0x20);  // objectTypeIndication ISO/IEC 14492-2
+            mOwner->writeInt8(0x11);  // streamType VisualStream
+
+            static const uint8_t kData[] = {
+                0x01, 0x77, 0x00,
+                0x00, 0x03, 0xe8, 0x00,
+                0x00, 0x03, 0xe8, 0x00
+            };
+            mOwner->write(kData, sizeof(kData));
+
+            mOwner->writeInt8(0x05);  // DecoderSpecificInfoTag
+
+            mOwner->writeInt8(mCodecSpecificDataSize);
+            mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+
+            static const uint8_t kData2[] = {
+                0x06,  // SLConfigDescriptorTag
+                0x01,
+                0x02
+            };
+            mOwner->write(kData2, sizeof(kData2));
+
+            mOwner->endBox();  // esds
+        } else if (!strcasecmp("video/3gpp", mime)) {
+            mOwner->beginBox("d263");
+
+            mOwner->writeInt32(0);  // vendor
+            mOwner->writeInt8(0);  // decoder version
+            mOwner->writeInt8(10);  // level: 10
+            mOwner->writeInt8(0);  // profile: 0
+
+            mOwner->endBox();  // d263
+        }
+        mOwner->endBox();  // mp4v or s263
+    }
+    mOwner->endBox();  // stsd
+
+    mOwner->beginBox("stts");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    // One (count=1, delta) entry per pair of consecutive samples.  Guard
+    // the empty track: the old code dereferenced begin() unconditionally
+    // and wrote size() - 1, which underflows to 0xffffffff on 0 samples.
+    if (mSampleInfos.empty()) {
+        mOwner->writeInt32(0);  // entry count
+    } else {
+        mOwner->writeInt32(mSampleInfos.size() - 1);
+
+        List<SampleInfo>::iterator it = mSampleInfos.begin();
+        int64_t last = (*it).timestamp;
+        ++it;
+        while (it != mSampleInfos.end()) {
+            mOwner->writeInt32(1);
+            mOwner->writeInt32((*it).timestamp - last);
+
+            last = (*it).timestamp;
+
+            ++it;
+        }
+    }
+    mOwner->endBox();  // stts
+
+    mOwner->beginBox("stsz");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(0);  // default sample size
+    mOwner->writeInt32(mSampleInfos.size());
+    for (List<SampleInfo>::iterator it = mSampleInfos.begin();
+         it != mSampleInfos.end(); ++it) {
+        mOwner->writeInt32((*it).size);
+    }
+    mOwner->endBox();  // stsz
+
+    // One chunk per sample (inefficient, but matches the co64 table).
+    mOwner->beginBox("stsc");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(mSampleInfos.size());
+    int32_t n = 1;
+    for (List<SampleInfo>::iterator it = mSampleInfos.begin();
+         it != mSampleInfos.end(); ++it, ++n) {
+        mOwner->writeInt32(n);
+        mOwner->writeInt32(1);
+        mOwner->writeInt32(1);
+    }
+    mOwner->endBox();  // stsc
+
+    mOwner->beginBox("co64");
+    mOwner->writeInt32(0);  // version=0, flags=0
+    mOwner->writeInt32(mSampleInfos.size());
+    for (List<SampleInfo>::iterator it = mSampleInfos.begin();
+         it != mSampleInfos.end(); ++it) {
+        mOwner->writeInt64((*it).offset);
+    }
+    mOwner->endBox();  // co64
+
+    mOwner->endBox();  // stbl
+    mOwner->endBox();  // minf
+    mOwner->endBox();  // mdia
+    mOwner->endBox();  // trak
+}
+
+} // namespace android
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
new file mode 100644
index 0000000..cd78dbd
--- /dev/null
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MediaBuffer"
+#include <utils/Log.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <errno.h>
+#include <pthread.h>
+#include <stdlib.h>
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+// XXX make this truly atomic.
+static int atomic_add(int *value, int delta) {
+ int prev_value = *value;
+ *value += delta;
+
+ return prev_value;
+}
+
+// Wraps caller-owned memory; the buffer will NOT free 'data'.
+MediaBuffer::MediaBuffer(void *data, size_t size)
+    : mObserver(NULL),
+      mNextBuffer(NULL),
+      mRefCount(0),
+      mData(data),
+      mSize(size),
+      mRangeOffset(0),
+      mRangeLength(size),
+      mOwnsData(false),
+      mMetaData(new MetaData),
+      mOriginal(NULL) {
+}
+
+// Allocates and owns 'size' bytes, freed in the destructor.
+// NOTE(review): the malloc() result is not checked here.
+MediaBuffer::MediaBuffer(size_t size)
+    : mObserver(NULL),
+      mNextBuffer(NULL),
+      mRefCount(0),
+      mData(malloc(size)),
+      mSize(size),
+      mRangeOffset(0),
+      mRangeLength(size),
+      mOwnsData(true),
+      mMetaData(new MetaData),
+      mOriginal(NULL) {
+}
+
+// Drops one reference.  Unobserved buffers are deleted outright;
+// observed buffers are handed back to their observer (e.g. the owning
+// MediaBufferGroup) once the last reference is gone.
+//
+// Cleanup relative to the first draft: the inner 'mObserver == NULL'
+// re-check was dead code (the early return above guarantees it is
+// non-NULL here), and the sanity assert now runs BEFORE the buffer can
+// be returned to its group and recycled.
+void MediaBuffer::release() {
+    if (mObserver == NULL) {
+        // Stand-alone buffer: no refcounting, the caller owns it outright.
+        assert(mRefCount == 0);
+        delete this;
+        return;
+    }
+
+    int prevCount = atomic_add(&mRefCount, -1);
+
+    // Releasing a buffer nobody holds a reference to is a bug.
+    assert(prevCount > 0);
+
+    if (prevCount == 1) {
+        // That was the last reference; notify the owner.
+        mObserver->signalBufferReturned(this);
+    }
+}
+
+// Transfers ownership of an observed buffer to the caller by detaching
+// its single outstanding reference, so release() bookkeeping no longer
+// applies to it.
+void MediaBuffer::claim() {
+    assert(mObserver != NULL);
+    assert(mRefCount == 1);
+
+    mRefCount = 0;
+}
+
+void MediaBuffer::add_ref() {
+    atomic_add(&mRefCount, 1);
+}
+
+void *MediaBuffer::data() const {
+    return mData;
+}
+
+size_t MediaBuffer::size() const {
+    return mSize;
+}
+
+// Offset and length of the valid data window within the allocation.
+size_t MediaBuffer::range_offset() const {
+    return mRangeOffset;
+}
+
+size_t MediaBuffer::range_length() const {
+    return mRangeLength;
+}
+
+// Restricts the valid data window to [offset, offset + length), which
+// must lie entirely within the underlying allocation.
+//
+// Fixes relative to the first draft: 'offset < 0' was dead code (size_t
+// is unsigned), 'offset + length > mSize' could wrap around on overflow,
+// and the log used %d for size_t arguments.
+void MediaBuffer::set_range(size_t offset, size_t length) {
+    if (offset > mSize || length > mSize - offset) {
+        LOGE("offset = %u, length = %u, mSize = %u",
+             (unsigned)offset, (unsigned)length, (unsigned)mSize);
+    }
+    assert(offset <= mSize && length <= mSize - offset);
+
+    mRangeOffset = offset;
+    mRangeLength = length;
+}
+
+sp<MetaData> MediaBuffer::meta_data() {
+    return mMetaData;
+}
+
+// Returns the buffer to its pristine state: empty metadata, full range.
+void MediaBuffer::reset() {
+    mMetaData->clear();
+    set_range(0, mSize);
+}
+
+MediaBuffer::~MediaBuffer() {
+    // Observed buffers must be detached (setObserver(NULL)) before delete.
+    assert(mObserver == NULL);
+
+    if (mOwnsData && mData != NULL) {
+        free(mData);
+        mData = NULL;
+    }
+
+    // A clone holds a reference on the buffer whose data it shares.
+    if (mOriginal != NULL) {
+        mOriginal->release();
+        mOriginal = NULL;
+    }
+}
+
+// Installs or clears the observer notified when the last reference is
+// dropped.  Only NULL -> observer and observer -> NULL transitions are
+// allowed.
+void MediaBuffer::setObserver(MediaBufferObserver *observer) {
+    assert(observer == NULL || mObserver == NULL);
+    mObserver = observer;
+}
+
+// Intrusive singly-linked-list hooks used by MediaBufferGroup.
+void MediaBuffer::setNextBuffer(MediaBuffer *buffer) {
+    mNextBuffer = buffer;
+}
+
+MediaBuffer *MediaBuffer::nextBuffer() {
+    return mNextBuffer;
+}
+
+int MediaBuffer::refcount() const {
+    return mRefCount;
+}
+
+// Creates a shallow copy that shares the underlying data (no observer,
+// its own copy of the metadata).  The clone keeps the original alive via
+// add_ref() and releases it in ~MediaBuffer().
+MediaBuffer *MediaBuffer::clone() {
+    MediaBuffer *buffer = new MediaBuffer(mData, mSize);
+    buffer->set_range(mRangeOffset, mRangeLength);
+    buffer->mMetaData = new MetaData(*mMetaData.get());
+
+    add_ref();
+    buffer->mOriginal = this;
+
+    return buffer;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/MediaBufferGroup.cpp b/media/libstagefright/MediaBufferGroup.cpp
new file mode 100644
index 0000000..aec7722
--- /dev/null
+++ b/media/libstagefright/MediaBufferGroup.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MediaBufferGroup"
+#include <utils/Log.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+
+namespace android {
+
+MediaBufferGroup::MediaBufferGroup()
+    : mFirstBuffer(NULL),
+      mLastBuffer(NULL) {
+}
+
+// Destroys every buffer in the group; all of them must have been
+// returned (refcount 0) by now.
+MediaBufferGroup::~MediaBufferGroup() {
+    MediaBuffer *next;
+    for (MediaBuffer *buffer = mFirstBuffer; buffer != NULL;
+         buffer = next) {
+        next = buffer->nextBuffer();
+
+        assert(buffer->refcount() == 0);
+
+        // Detach first so ~MediaBuffer's observer assert holds.
+        buffer->setObserver(NULL);
+        buffer->release();
+    }
+}
+
+// Takes ownership of 'buffer' and appends it to the intrusive list.
+void MediaBufferGroup::add_buffer(MediaBuffer *buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    buffer->setObserver(this);
+
+    if (mLastBuffer) {
+        mLastBuffer->setNextBuffer(buffer);
+    } else {
+        mFirstBuffer = buffer;
+    }
+
+    mLastBuffer = buffer;
+}
+
+// Blocks until a free buffer (refcount 0) is available, takes a
+// reference on it, resets it and returns it in *out.  Always returns OK.
+status_t MediaBufferGroup::acquire_buffer(MediaBuffer **out) {
+    Mutex::Autolock autoLock(mLock);
+
+    for (;;) {
+        MediaBuffer *free_buffer = mFirstBuffer;
+        while (free_buffer != NULL && free_buffer->refcount() != 0) {
+            free_buffer = free_buffer->nextBuffer();
+        }
+
+        if (free_buffer != NULL) {
+            free_buffer->add_ref();
+            free_buffer->reset();
+
+            *out = free_buffer;
+            return OK;
+        }
+
+        // All buffers are in use. Block until one of them is returned to us.
+        mCondition.wait(mLock);
+    }
+}
+
+// MediaBufferObserver callback: a buffer's last reference was dropped.
+// Wakes one waiter blocked in acquire_buffer().
+void MediaBufferGroup::signalBufferReturned(MediaBuffer *) {
+    Mutex::Autolock autoLock(mLock);
+    mCondition.signal();
+}
+
+} // namespace android
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
new file mode 100644
index 0000000..bc66794
--- /dev/null
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaExtractor"
+#include <utils/Log.h>
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MP3Extractor.h>
+#include <media/stagefright/MPEG4Extractor.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <utils/String8.h>
+
+namespace android {
+
+// static
+// static
+// Instantiates the extractor matching 'mime'.  When mime is NULL, the
+// content type is sniffed from the data itself.  Returns NULL when the
+// content cannot be identified or is unsupported.
+MediaExtractor *MediaExtractor::Create(DataSource *source, const char *mime) {
+    String8 tmp;
+    if (mime == NULL) {
+        float confidence;
+        if (!source->sniff(&tmp, &confidence)) {
+            LOGE("FAILED to autodetect media content.");
+
+            return NULL;
+        }
+
+        mime = tmp.string();
+        LOGI("Autodetected media content as '%s' with confidence %.2f",
+             mime, confidence);
+    }
+
+    const bool is_mp4 =
+        !strcasecmp(mime, "video/mp4") || !strcasecmp(mime, "audio/mp4");
+
+    if (is_mp4) {
+        return new MPEG4Extractor(source);
+    }
+
+    if (!strcasecmp(mime, "audio/mpeg")) {
+        return new MP3Extractor(source);
+    }
+
+    return NULL;
+}
+
+} // namespace android
diff --git a/media/libstagefright/MediaPlayerImpl.cpp b/media/libstagefright/MediaPlayerImpl.cpp
new file mode 100644
index 0000000..78fcdee
--- /dev/null
+++ b/media/libstagefright/MediaPlayerImpl.cpp
@@ -0,0 +1,693 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayerImpl"
+#include "utils/Log.h"
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <OMX_Component.h>
+
+#include <unistd.h>
+
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/CachingDataSource.h>
+// #include <media/stagefright/CameraSource.h>
+#include <media/stagefright/HTTPDataSource.h>
+#include <media/stagefright/HTTPStream.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaPlayerImpl.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MmapSource.h>
+#include <media/stagefright/OMXDecoder.h>
+#include <media/stagefright/QComHardwareRenderer.h>
+#include <media/stagefright/ShoutcastSource.h>
+#include <media/stagefright/SoftwareRenderer.h>
+#include <media/stagefright/SurfaceRenderer.h>
+#include <media/stagefright/TimeSource.h>
+#include <ui/PixelFormat.h>
+#include <ui/Surface.h>
+
+namespace android {
+
+// Builds a player for a URI (shoutcast://, file://, http://, or a plain
+// filename).  On any failure mInitCheck keeps its NO_INIT value; callers
+// must consult initCheck().
+MediaPlayerImpl::MediaPlayerImpl(const char *uri)
+    : mInitCheck(NO_INIT),
+      mExtractor(NULL),
+      mTimeSource(NULL),
+      mAudioSource(NULL),
+      mAudioDecoder(NULL),
+      mAudioPlayer(NULL),
+      mVideoSource(NULL),
+      mVideoDecoder(NULL),
+      mVideoWidth(0),
+      mVideoHeight(0),
+      mVideoPosition(0),
+      mDuration(0),
+      mPlaying(false),
+      mPaused(false),
+      mRenderer(NULL),
+      mSeeking(false),
+      mFrameSize(0),
+      mUseSoftwareColorConversion(false) {
+    LOGI("MediaPlayerImpl(%s)", uri);
+    DataSource::RegisterDefaultSniffers();
+
+    status_t err = mClient.connect();
+    if (err != OK) {
+        LOGE("Failed to connect to OMXClient.");
+        return;
+    }
+
+    if (!strncasecmp("shoutcast://", uri, 12)) {
+        setAudioSource(makeShoutcastSource(uri));
+#if 0
+    } else if (!strncasecmp("camera:", uri, 7)) {
+        mVideoWidth = 480;
+        mVideoHeight = 320;
+        mVideoDecoder = CameraSource::Create();
+#endif
+    } else {
+        DataSource *source = NULL;
+        if (!strncasecmp("file://", uri, 7)) {
+            source = new MmapSource(uri + 7);
+        } else if (!strncasecmp("http://", uri, 7)) {
+            // Cache HTTP reads (64 KB pages, 10 pages deep).
+            source = new HTTPDataSource(uri);
+            source = new CachingDataSource(source, 64 * 1024, 10);
+        } else {
+            // Assume it's a filename.
+            source = new MmapSource(uri);
+        }
+
+        mExtractor = MediaExtractor::Create(source);
+
+        if (mExtractor == NULL) {
+            // NOTE(review): 'source' appears to leak on this path --
+            // confirm the ownership convention of MediaExtractor::Create.
+            return;
+        }
+    }
+
+    init();
+
+    mInitCheck = OK;
+}
+
+// Builds a player for a window [offset, offset + length) of an
+// already-open file descriptor.
+MediaPlayerImpl::MediaPlayerImpl(int fd, int64_t offset, int64_t length)
+    : mInitCheck(NO_INIT),
+      mExtractor(NULL),
+      mTimeSource(NULL),
+      mAudioSource(NULL),
+      mAudioDecoder(NULL),
+      mAudioPlayer(NULL),
+      mVideoSource(NULL),
+      mVideoDecoder(NULL),
+      mVideoWidth(0),
+      mVideoHeight(0),
+      mVideoPosition(0),
+      mDuration(0),
+      mPlaying(false),
+      mPaused(false),
+      mRenderer(NULL),
+      mSeeking(false),
+      mFrameSize(0),
+      mUseSoftwareColorConversion(false) {
+    LOGI("MediaPlayerImpl(%d, %lld, %lld)", fd, offset, length);
+    DataSource::RegisterDefaultSniffers();
+
+    status_t err = mClient.connect();
+    if (err != OK) {
+        LOGE("Failed to connect to OMXClient.");
+        return;
+    }
+
+    mExtractor = MediaExtractor::Create(
+            new MmapSource(fd, offset, length));
+
+    if (mExtractor == NULL) {
+        return;
+    }
+
+    init();
+
+    mInitCheck = OK;
+}
+
+// OK once construction fully succeeded, NO_INIT otherwise.
+status_t MediaPlayerImpl::initCheck() const {
+    return mInitCheck;
+}
+
+// Stops playback, tears down renderer/decoders/sources/extractor in
+// dependency order, then disconnects from the OMX client.
+MediaPlayerImpl::~MediaPlayerImpl() {
+    stop();
+    setSurface(NULL);
+
+    LOGV("Shutting down audio.");
+    delete mAudioDecoder;
+    mAudioDecoder = NULL;
+
+    delete mAudioSource;
+    mAudioSource = NULL;
+
+    LOGV("Shutting down video.");
+    delete mVideoDecoder;
+    mVideoDecoder = NULL;
+
+    delete mVideoSource;
+    mVideoSource = NULL;
+
+    delete mExtractor;
+    mExtractor = NULL;
+
+    if (mInitCheck == OK) {
+        mClient.disconnect();
+    }
+
+    LOGV("~MediaPlayerImpl done.");
+}
+
+// Starts (or resumes) playback.  The audio player doubles as the clock
+// when audio is present; otherwise a system-time clock is used.  Video
+// runs on its own thread (videoEntry).
+void MediaPlayerImpl::play() {
+    LOGI("play");
+
+    if (mPlaying) {
+        // Already playing: just resume if we were paused.
+        if (mPaused) {
+            if (mAudioSource != NULL) {
+                mAudioPlayer->resume();
+            }
+            mPaused = false;
+        }
+        return;
+    }
+
+    mPlaying = true;
+
+    if (mAudioSource != NULL) {
+        mAudioPlayer = new AudioPlayer(mAudioSink);
+        mAudioPlayer->setSource(mAudioDecoder);
+        mAudioPlayer->start();
+        mTimeSource = mAudioPlayer;  // audio clock drives A/V sync
+    } else {
+        mTimeSource = new SystemTimeSource;
+    }
+
+    if (mVideoDecoder != NULL) {
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+        pthread_create(&mVideoThread, &attr, VideoWrapper, this);
+
+        pthread_attr_destroy(&attr);
+    }
+}
+
+// Suspends playback.  Does nothing unless we are playing and not yet
+// paused.  The video thread keeps running but idles while mPaused is
+// set; audio is paused at the AudioPlayer level.
+void MediaPlayerImpl::pause() {
+    const bool can_pause = mPlaying && !mPaused;
+    if (!can_pause) {
+        return;
+    }
+
+    if (mAudioSource != NULL) {
+        mAudioPlayer->pause();
+    }
+
+    mPaused = true;
+}
+
+// Stops playback: clears mPlaying (which makes the video thread exit),
+// joins the video thread, then shuts down the audio player / clock.
+void MediaPlayerImpl::stop() {
+    if (!mPlaying) {
+        return;
+    }
+
+    mPlaying = false;
+
+    if (mVideoDecoder != NULL) {
+        void *dummy;
+        pthread_join(mVideoThread, &dummy);
+    }
+
+    if (mAudioSource != NULL) {
+        mAudioPlayer->stop();
+
+        // mTimeSource aliased mAudioPlayer in this case.
+        delete mAudioPlayer;
+        mAudioPlayer = NULL;
+    } else {
+        delete mTimeSource;
+    }
+
+    mTimeSource = NULL;
+}
+
+// static
+// C-style pthread entry point; forwards into the member function.
+void *MediaPlayerImpl::VideoWrapper(void *me) {
+    ((MediaPlayerImpl *)me)->videoEntry();
+
+    return NULL;
+}
+
+// Video thread: reads decoded frames, handles pending seeks, keeps the
+// audio clock and video timestamps in sync, and hands frames to
+// displayOrDiscardFrame() for A/V-synchronized presentation.
+void MediaPlayerImpl::videoEntry() {
+    bool firstFrame = true;
+    bool eof = false;
+
+    status_t err = mVideoDecoder->start();
+    assert(err == OK);
+
+    while (mPlaying) {
+        MediaBuffer *buffer;
+
+        MediaSource::ReadOptions options;
+        bool seeking = false;
+
+        {
+            // Consume a pending seek request under the lock.
+            Mutex::Autolock autoLock(mLock);
+            if (mSeeking) {
+                LOGI("seek-options to %lld", mSeekTimeUs);
+                options.setSeekTo(mSeekTimeUs);
+
+                mSeeking = false;
+                seeking = true;
+                eof = false;
+            }
+        }
+
+        if (eof || mPaused) {
+            usleep(100000);
+            continue;
+        }
+
+        status_t err = mVideoDecoder->read(&buffer, &options);
+        assert((err == OK && buffer != NULL) || (err != OK && buffer == NULL));
+
+        // (The ERROR_END_OF_STREAM test is subsumed by err != OK; kept
+        // for readability.)
+        if (err == ERROR_END_OF_STREAM || err != OK) {
+            eof = true;
+            continue;
+        }
+
+        if (buffer->range_length() == 0) {
+            // The final buffer is empty.
+            buffer->release();
+            continue;
+        }
+
+        int32_t units, scale;
+        bool success =
+            buffer->meta_data()->findInt32(kKeyTimeUnits, &units);
+        assert(success);
+        success =
+            buffer->meta_data()->findInt32(kKeyTimeScale, &scale);
+        assert(success);
+
+        int64_t pts_us = (int64_t)units * 1000000 / scale;
+        {
+            Mutex::Autolock autoLock(mLock);
+            mVideoPosition = pts_us;
+        }
+
+        if (seeking && mAudioPlayer != NULL) {
+            // Now that we know where exactly video seeked (taking sync-samples
+            // into account), we will seek the audio track to the same time.
+            mAudioPlayer->seekTo(pts_us);
+        }
+
+        if (firstFrame || seeking) {
+            // (Re-)anchor the wall-clock <-> media-time mapping.
+            mTimeSourceDeltaUs = mTimeSource->getRealTimeUs() - pts_us;
+            firstFrame = false;
+        }
+
+        displayOrDiscardFrame(buffer, pts_us);
+    }
+
+    mVideoDecoder->stop();
+}
+
+// Presents 'buffer' at media time 'pts_us': drops it if we're more than
+// 15 ms late, sleeps if we're early, otherwise renders it.  Always
+// releases the buffer before returning.
+void MediaPlayerImpl::displayOrDiscardFrame(
+        MediaBuffer *buffer, int64_t pts_us) {
+    for (;;) {
+        if (!mPlaying || mPaused) {
+            buffer->release();
+            buffer = NULL;
+
+            return;
+        }
+
+        // Prefer the audio player's live clock mapping when available.
+        int64_t realtime_us, mediatime_us;
+        if (mAudioPlayer != NULL
+            && mAudioPlayer->getMediaTimeMapping(&realtime_us, &mediatime_us)) {
+            mTimeSourceDeltaUs = realtime_us - mediatime_us;
+        }
+
+        int64_t now_us = mTimeSource->getRealTimeUs();
+        now_us -= mTimeSourceDeltaUs;
+
+        int64_t delay_us = pts_us - now_us;
+
+        if (delay_us < -15000) {
+            // We're late.
+
+            LOGI("we're late by %lld ms, dropping a frame\n",
+                 -delay_us / 1000);
+
+            buffer->release();
+            buffer = NULL;
+            return;
+        } else if (delay_us > 100000) {
+            // Way early: nap and re-evaluate (a seek/pause may intervene).
+            LOGI("we're much too early (by %lld ms)\n",
+                 delay_us / 1000);
+            usleep(100000);
+            continue;
+        } else if (delay_us > 0) {
+            usleep(delay_us);
+        }
+
+        break;
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mRenderer != NULL) {
+            sendFrameToISurface(buffer);
+        }
+    }
+
+    buffer->release();
+    buffer = NULL;
+}
+
+void MediaPlayerImpl::init() {
+ if (mExtractor != NULL) {
+ int num_tracks;
+ assert(mExtractor->countTracks(&num_tracks) == OK);
+
+ mDuration = 0;
+
+ for (int i = 0; i < num_tracks; ++i) {
+ const sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ assert(meta != NULL);
+
+ const char *mime;
+ if (!meta->findCString(kKeyMIMEType, &mime)) {
+ continue;
+ }
+
+ bool is_audio = false;
+ bool is_acceptable = false;
+ if (!strncasecmp(mime, "audio/", 6)) {
+ is_audio = true;
+ is_acceptable = (mAudioSource == NULL);
+ } else if (!strncasecmp(mime, "video/", 6)) {
+ is_acceptable = (mVideoSource == NULL);
+ }
+
+ if (!is_acceptable) {
+ continue;
+ }
+
+ MediaSource *source;
+ if (mExtractor->getTrack(i, &source) != OK) {
+ continue;
+ }
+
+ int32_t units, scale;
+ if (meta->findInt32(kKeyDuration, &units)
+ && meta->findInt32(kKeyTimeScale, &scale)) {
+ int64_t duration_us = (int64_t)units * 1000000 / scale;
+ if (duration_us > mDuration) {
+ mDuration = duration_us;
+ }
+ }
+
+ if (is_audio) {
+ setAudioSource(source);
+ } else {
+ setVideoSource(source);
+ }
+ }
+ }
+}
+
+void MediaPlayerImpl::setAudioSource(MediaSource *source) {
+ mAudioSource = source;
+
+ sp<MetaData> meta = source->getFormat();
+
+ mAudioDecoder = OMXDecoder::Create(&mClient, meta);
+ mAudioDecoder->setSource(source);
+}
+
+void MediaPlayerImpl::setVideoSource(MediaSource *source) {
+ LOGI("setVideoSource");
+ mVideoSource = source;
+
+ sp<MetaData> meta = source->getFormat();
+
+ bool success = meta->findInt32(kKeyWidth, &mVideoWidth);
+ assert(success);
+
+ success = meta->findInt32(kKeyHeight, &mVideoHeight);
+ assert(success);
+
+ mVideoDecoder = OMXDecoder::Create(&mClient, meta);
+ ((OMXDecoder *)mVideoDecoder)->setSource(source);
+
+ if (mISurface.get() != NULL || mSurface.get() != NULL) {
+ depopulateISurface();
+ populateISurface();
+ }
+}
+
+void MediaPlayerImpl::setSurface(const sp<Surface> &surface) {
+ LOGI("setSurface %p", surface.get());
+ Mutex::Autolock autoLock(mLock);
+
+ depopulateISurface();
+
+ mSurface = surface;
+ mISurface = NULL;
+
+ if (mSurface.get() != NULL) {
+ populateISurface();
+ }
+}
+
+void MediaPlayerImpl::setISurface(const sp<ISurface> &isurface) {
+ LOGI("setISurface %p", isurface.get());
+ Mutex::Autolock autoLock(mLock);
+
+ depopulateISurface();
+
+ mSurface = NULL;
+ mISurface = isurface;
+
+ if (mISurface.get() != NULL) {
+ populateISurface();
+ }
+}
+
+MediaSource *MediaPlayerImpl::makeShoutcastSource(const char *uri) {
+ if (strncasecmp(uri, "shoutcast://", 12)) {
+ return NULL;
+ }
+
+ string host;
+ string path;
+ int port;
+
+ char *slash = strchr(uri + 12, '/');
+ if (slash == NULL) {
+ host = uri + 12;
+ path = "/";
+ } else {
+ host = string(uri + 12, slash - (uri + 12));
+ path = slash;
+ }
+
+ char *colon = strchr(host.c_str(), ':');
+ if (colon == NULL) {
+ port = 80;
+ } else {
+ char *end;
+ long tmp = strtol(colon + 1, &end, 10);
+ assert(end > colon + 1);
+ assert(tmp > 0 && tmp < 65536);
+ port = tmp;
+
+ host = string(host, 0, colon - host.c_str());
+ }
+
+ LOGI("Connecting to host '%s', port %d, path '%s'",
+ host.c_str(), port, path.c_str());
+
+ HTTPStream *http = new HTTPStream;
+ int http_status;
+
+ for (;;) {
+ status_t err = http->connect(host.c_str(), port);
+ assert(err == OK);
+
+ err = http->send("GET ");
+ err = http->send(path.c_str());
+ err = http->send(" HTTP/1.1\r\n");
+ err = http->send("Host: ");
+ err = http->send(host.c_str());
+ err = http->send("\r\n");
+ err = http->send("Icy-MetaData: 1\r\n\r\n");
+
+ assert(OK == http->receive_header(&http_status));
+
+ if (http_status == 301 || http_status == 302) {
+ string location;
+ assert(http->find_header_value("Location", &location));
+
+ assert(string(location, 0, 7) == "http://");
+ location.erase(0, 7);
+ string::size_type slashPos = location.find('/');
+ if (slashPos == string::npos) {
+ slashPos = location.size();
+ location += '/';
+ }
+
+ http->disconnect();
+
+ LOGI("Redirecting to %s\n", location.c_str());
+
+ host = string(location, 0, slashPos);
+
+ string::size_type colonPos = host.find(':');
+ if (colonPos != string::npos) {
+ const char *start = host.c_str() + colonPos + 1;
+ char *end;
+ long tmp = strtol(start, &end, 10);
+ assert(end > start && *end == '\0');
+
+ port = (tmp >= 0 && tmp < 65536) ? (int)tmp : 80;
+ } else {
+ port = 80;
+ }
+
+ path = string(location, slashPos);
+
+ continue;
+ }
+
+ break;
+ }
+
+ if (http_status != 200) {
+ LOGE("Connection failed: http_status = %d", http_status);
+ return NULL;
+ }
+
+ MediaSource *source = new ShoutcastSource(http);
+
+ return source;
+}
+
// True while playback has been started and is not currently paused.
bool MediaPlayerImpl::isPlaying() const {
    return mPlaying && !mPaused;
}

// Longest selected-track duration in microseconds (computed in init()).
int64_t MediaPlayerImpl::getDuration() {
    return mDuration;
}
+
+int64_t MediaPlayerImpl::getPosition() {
+ int64_t position = 0;
+ if (mVideoSource != NULL) {
+ Mutex::Autolock autoLock(mLock);
+ position = mVideoPosition;
+ } else if (mAudioPlayer != NULL) {
+ position = mAudioPlayer->getMediaTimeUs();
+ }
+
+ return position;
+}
+
+status_t MediaPlayerImpl::seekTo(int64_t time) {
+ LOGI("seekTo %lld", time);
+
+ if (mPaused) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (mVideoSource == NULL && mAudioPlayer != NULL) {
+ mAudioPlayer->seekTo(time);
+ } else {
+ Mutex::Autolock autoLock(mLock);
+ mSeekTimeUs = time;
+ mSeeking = true;
+ }
+
+ return OK;
+}
+
// Creates the renderer matching the attached surface and the decoder's
// output format: SurfaceRenderer for a Surface, the QCom hardware path for
// qcom decoders emitting YUV420 planar, and a software renderer otherwise.
void MediaPlayerImpl::populateISurface() {
    if (mVideoSource == NULL) {
        return;
    }

    sp<MetaData> meta = mVideoDecoder->getFormat();

    int32_t format;
    const char *component;
    int32_t decodedWidth, decodedHeight;
    // All four keys must be present on the decoder's output format.
    bool success = meta->findInt32(kKeyColorFormat, &format);
    success = success && meta->findCString(kKeyDecoderComponent, &component);
    success = success && meta->findInt32(kKeyWidth, &decodedWidth);
    success = success && meta->findInt32(kKeyHeight, &decodedHeight);
    assert(success);

    if (mSurface.get() != NULL) {
        mRenderer =
            new SurfaceRenderer(
                    mSurface, mVideoWidth, mVideoHeight,
                    decodedWidth, decodedHeight);
    } else if (format == OMX_COLOR_FormatYUV420Planar
               && !strncasecmp(component, "OMX.qcom.video.decoder.", 23)) {
        mRenderer =
            new QComHardwareRenderer(
                    mISurface, mVideoWidth, mVideoHeight,
                    decodedWidth, decodedHeight);
    } else {
        LOGW("Using software renderer.");
        mRenderer = new SoftwareRenderer(
                mISurface, mVideoWidth, mVideoHeight,
                decodedWidth, decodedHeight);
    }
}
+
// Destroys the current renderer, if any.
// NOTE(review): setSurface/setISurface call this with mLock held while the
// video thread reads mRenderer under the same lock, but setVideoSource's
// call site shows no lock — confirm that path cannot race the video thread.
void MediaPlayerImpl::depopulateISurface() {
    delete mRenderer;
    mRenderer = NULL;
}
+
+void MediaPlayerImpl::sendFrameToISurface(MediaBuffer *buffer) {
+ void *platformPrivate;
+ if (!buffer->meta_data()->findPointer(
+ kKeyPlatformPrivate, &platformPrivate)) {
+ platformPrivate = NULL;
+ }
+
+ mRenderer->render(
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ buffer->range_length(),
+ platformPrivate);
+}
+
// Stores the audio sink handed down by the media player framework for
// later use.
void MediaPlayerImpl::setAudioSink(
        const sp<MediaPlayerBase::AudioSink> &audioSink) {
    LOGI("setAudioSink %p", audioSink.get());
    mAudioSink = audioSink;
}
+
+} // namespace android
+
diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp
new file mode 100644
index 0000000..ec89b74
--- /dev/null
+++ b/media/libstagefright/MediaSource.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/MediaSource.h>
+
+namespace android {
+
// Nothing to initialize or tear down at this level.
MediaSource::MediaSource() {}

MediaSource::~MediaSource() {}
+
+////////////////////////////////////////////////////////////////////////////////
+
// Options start out empty: no seek request, zero lateness.
MediaSource::ReadOptions::ReadOptions() {
    reset();
}

// Returns the options to their default, empty state.
void MediaSource::ReadOptions::reset() {
    mOptions = 0;
    mSeekTimeUs = 0;
    mLatenessUs = 0;
}
+
// Requests that the next read() seek to |time_us| before returning data.
void MediaSource::ReadOptions::setSeekTo(int64_t time_us) {
    mOptions |= kSeekTo_Option;
    mSeekTimeUs = time_us;
}

// Cancels a pending seek request.
void MediaSource::ReadOptions::clearSeekTo() {
    mOptions &= ~kSeekTo_Option;
    mSeekTimeUs = 0;
}

// Returns true (and the target time) iff a seek has been requested.
bool MediaSource::ReadOptions::getSeekTo(int64_t *time_us) const {
    *time_us = mSeekTimeUs;
    return (mOptions & kSeekTo_Option) != 0;
}
+
// Records how late (microseconds) the caller currently is.
void MediaSource::ReadOptions::setLateBy(int64_t lateness_us) {
    mLatenessUs = lateness_us;
}

// Reports the lateness previously set via setLateBy (0 by default).
int64_t MediaSource::ReadOptions::getLateBy() const {
    return mLatenessUs;
}
+
+} // namespace android
diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp
new file mode 100644
index 0000000..5d23732b
--- /dev/null
+++ b/media/libstagefright/MetaData.cpp
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
MetaData::MetaData() {
}

// Copy ctor: RefBase is default-constructed (reference counts are never
// copied); only the key/value items are duplicated.
MetaData::MetaData(const MetaData &from)
    : RefBase(),
      mItems(from.mItems) {
}

MetaData::~MetaData() {
    clear();
}

// Drops every stored key/value item.
void MetaData::clear() {
    mItems.clear();
}
+
+bool MetaData::remove(uint32_t key) {
+ ssize_t i = mItems.indexOfKey(key);
+
+ if (i < 0) {
+ return false;
+ }
+
+ mItems.removeItemsAt(i);
+
+ return true;
+}
+
// Typed convenience wrappers over setData. Each returns true iff an
// existing entry under |key| was overwritten.

// Stores a copy of |value| including its NUL terminator.
bool MetaData::setCString(uint32_t key, const char *value) {
    return setData(key, TYPE_C_STRING, value, strlen(value) + 1);
}

bool MetaData::setInt32(uint32_t key, int32_t value) {
    return setData(key, TYPE_INT32, &value, sizeof(value));
}

bool MetaData::setFloat(uint32_t key, float value) {
    return setData(key, TYPE_FLOAT, &value, sizeof(value));
}

// Stores the pointer value itself; the pointee is NOT copied or owned.
bool MetaData::setPointer(uint32_t key, void *value) {
    return setData(key, TYPE_POINTER, &value, sizeof(value));
}
+
+bool MetaData::findCString(uint32_t key, const char **value) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (!findData(key, &type, &data, &size) || type != TYPE_C_STRING) {
+ return false;
+ }
+
+ *value = (const char *)data;
+
+ return true;
+}
+
+bool MetaData::findInt32(uint32_t key, int32_t *value) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (!findData(key, &type, &data, &size) || type != TYPE_INT32) {
+ return false;
+ }
+
+ assert(size == sizeof(*value));
+
+ *value = *(int32_t *)data;
+
+ return true;
+}
+
+bool MetaData::findFloat(uint32_t key, float *value) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (!findData(key, &type, &data, &size) || type != TYPE_FLOAT) {
+ return false;
+ }
+
+ assert(size == sizeof(*value));
+
+ *value = *(float *)data;
+
+ return true;
+}
+
+bool MetaData::findPointer(uint32_t key, void **value) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (!findData(key, &type, &data, &size) || type != TYPE_POINTER) {
+ return false;
+ }
+
+ assert(size == sizeof(*value));
+
+ *value = *(void **)data;
+
+ return true;
+}
+
+bool MetaData::setData(
+ uint32_t key, uint32_t type, const void *data, size_t size) {
+ bool overwrote_existing = true;
+
+ ssize_t i = mItems.indexOfKey(key);
+ if (i < 0) {
+ typed_data item;
+ i = mItems.add(key, item);
+
+ overwrote_existing = false;
+ }
+
+ typed_data &item = mItems.editValueAt(i);
+
+ item.setData(type, data, size);
+
+ return overwrote_existing;
+}
+
+bool MetaData::findData(uint32_t key, uint32_t *type,
+ const void **data, size_t *size) const {
+ ssize_t i = mItems.indexOfKey(key);
+
+ if (i < 0) {
+ return false;
+ }
+
+ const typed_data &item = mItems.valueAt(i);
+
+ item.getData(type, data, size);
+
+ return true;
+}
+
// A typed_data starts out empty: no type tag, no payload.
MetaData::typed_data::typed_data()
    : mType(0),
      mSize(0) {
}

MetaData::typed_data::~typed_data() {
    clear();
}

// Copy ctor: mSize is deliberately initialized to 0 so that
// allocateStorage starts from a clean slate before installing the copied
// size and (for large payloads) heap storage.
MetaData::typed_data::typed_data(const typed_data &from)
    : mType(from.mType),
      mSize(0) {
    allocateStorage(from.mSize);
    memcpy(storage(), from.storage(), mSize);
}
+
// Copy assignment: release the old payload first, then duplicate the
// source's type and bytes. Self-assignment safe.
MetaData::typed_data &MetaData::typed_data::operator=(
        const MetaData::typed_data &from) {
    if (this != &from) {
        clear();
        mType = from.mType;
        allocateStorage(from.mSize);
        memcpy(storage(), from.storage(), mSize);
    }

    return *this;
}

// Releases the payload and resets the type tag to "empty".
void MetaData::typed_data::clear() {
    freeStorage();

    mType = 0;
}
+
// Replaces the payload with a copy of |data| (old payload freed first).
void MetaData::typed_data::setData(
        uint32_t type, const void *data, size_t size) {
    clear();

    mType = type;
    allocateStorage(size);
    memcpy(storage(), data, size);
}

// Exposes the stored payload without copying; the pointer aliases internal
// storage.
void MetaData::typed_data::getData(
        uint32_t *type, const void **data, size_t *size) const {
    *type = mType;
    *size = mSize;
    *data = storage();
}
+
// Records the new payload size and, when it doesn't fit the inline
// reservoir, grabs heap storage for it.
// NOTE(review): malloc's result is unchecked — on OOM storage() would
// return NULL and setData's memcpy would crash; confirm whether OOM is
// treated as fatal in this codebase.
void MetaData::typed_data::allocateStorage(size_t size) {
    mSize = size;

    if (usesReservoir()) {
        return;
    }

    u.ext_data = malloc(mSize);
}

// Frees heap storage if any was allocated; afterwards the payload is empty
// (mSize == 0 means the reservoir is "in use", so no double free occurs).
void MetaData::typed_data::freeStorage() {
    if (!usesReservoir()) {
        if (u.ext_data) {
            free(u.ext_data);
        }
    }

    mSize = 0;
}
+
+} // namespace android
+
diff --git a/media/libstagefright/MmapSource.cpp b/media/libstagefright/MmapSource.cpp
new file mode 100644
index 0000000..7cb861c
--- /dev/null
+++ b/media/libstagefright/MmapSource.cpp
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MmapSource"
+#include <utils/Log.h>
+
+#include <sys/mman.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <fcntl.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <media/stagefright/MmapSource.h>
+
+namespace android {
+
+MmapSource::MmapSource(const char *filename)
+ : mFd(open(filename, O_RDONLY)),
+ mBase(NULL),
+ mSize(0) {
+ LOGV("MmapSource '%s'", filename);
+ assert(mFd >= 0);
+
+ off_t size = lseek(mFd, 0, SEEK_END);
+ mSize = (size_t)size;
+
+ mBase = mmap(0, mSize, PROT_READ, MAP_FILE | MAP_SHARED, mFd, 0);
+
+ if (mBase == (void *)-1) {
+ mBase = NULL;
+
+ close(mFd);
+ mFd = -1;
+ }
+}
+
+MmapSource::MmapSource(int fd, int64_t offset, int64_t length)
+ : mFd(fd),
+ mBase(NULL),
+ mSize(length) {
+ LOGV("MmapSource fd:%d offset:%lld length:%lld", fd, offset, length);
+ assert(fd >= 0);
+
+ mBase = mmap(0, mSize, PROT_READ, MAP_FILE | MAP_SHARED, mFd, offset);
+
+ if (mBase == (void *)-1) {
+ mBase = NULL;
+
+ close(mFd);
+ mFd = -1;
+ }
+
+}
+
// Unmaps the file and closes the descriptor; safe to run after a failed
// construction (mFd is -1 in that case).
MmapSource::~MmapSource() {
    if (mFd != -1) {
        munmap(mBase, mSize);
        mBase = NULL;
        mSize = 0;

        close(mFd);
        mFd = -1;
    }
}

// OK once the mapping succeeded, NO_INIT otherwise.
status_t MmapSource::InitCheck() const {
    return mFd == -1 ? NO_INIT : OK;
}
+
+ssize_t MmapSource::read_at(off_t offset, void *data, size_t size) {
+ LOGV("read_at offset:%ld data:%p size:%d", offset, data, size);
+ assert(offset >= 0);
+
+ size_t avail = 0;
+ if (offset >= 0 && offset < (off_t)mSize) {
+ avail = mSize - offset;
+ }
+
+ if (size > avail) {
+ size = avail;
+ }
+
+ memcpy(data, (const uint8_t *)mBase + offset, size);
+
+ return (ssize_t)size;
+}
+
// Reports the mapped length; always succeeds.
status_t MmapSource::getSize(off_t *size) {
    *size = mSize;

    return OK;
}
+
+} // namespace android
+
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
new file mode 100644
index 0000000..1bc8a44
--- /dev/null
+++ b/media/libstagefright/OMXClient.cpp
@@ -0,0 +1,369 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXClient"
+#include <utils/Log.h>
+
+#include <sys/socket.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <media/IOMX.h>
+#include <media/stagefright/OMXClient.h>
+
+namespace android {
+
// Starts disconnected; mSock == -1 doubles as the "not connected" flag.
OMXClient::OMXClient()
    : mSock(-1) {
}

OMXClient::~OMXClient() {
    disconnect();
}
+
// Binds to the media player service, obtains an IOMX and establishes the
// message channel: a socket plus reader thread when IOMX_USES_SOCKETS,
// otherwise a binder reflector.
// NOTE(review): in the !IOMX_USES_SOCKETS build mSock is never set, so the
// "already connected" guard here (and the mSock check in disconnect())
// never sees a connected state — confirm this is intended.
status_t OMXClient::connect() {
    Mutex::Autolock autoLock(mLock);

    if (mSock >= 0) {
        return UNKNOWN_ERROR;
    }

    sp<IServiceManager> sm = defaultServiceManager();
    sp<IBinder> binder = sm->getService(String16("media.player"));
    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

    assert(service.get() != NULL);

    mOMX = service->createOMX();
    assert(mOMX.get() != NULL);

#if IOMX_USES_SOCKETS
    status_t result = mOMX->connect(&mSock);
    if (result != OK) {
        // Roll back to the disconnected state on failure.
        mSock = -1;

        mOMX = NULL;
        return result;
    }

    // Spawn the joinable thread that drains incoming omx_messages.
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

    int err = pthread_create(&mThread, &attr, ThreadWrapper, this);
    assert(err == 0);

    pthread_attr_destroy(&attr);
#else
    mReflector = new OMXClientReflector(this);
#endif

    return OK;
}
+
// Tears the connection down. All observers must be unregistered first.
// NOTE(review): the early return on mSock < 0 means that in the
// !IOMX_USES_SOCKETS build (where mSock stays -1) the reflector is never
// reset/cleared — confirm whether that path leaks the reflector link.
void OMXClient::disconnect() {
    {
        Mutex::Autolock autoLock(mLock);

        if (mSock < 0) {
            return;
        }

        assert(mObservers.isEmpty());
    }

#if IOMX_USES_SOCKETS
    // Ask the service to hang up, then wait for the reader thread (which
    // closes mSock on its way out) to finish.
    omx_message msg;
    msg.type = omx_message::DISCONNECT;
    ssize_t n = send(mSock, &msg, sizeof(msg), 0);
    assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));

    void *dummy;
    pthread_join(mThread, &dummy);
#else
    mReflector->reset();
    mReflector.clear();
#endif
}
+
+#if IOMX_USES_SOCKETS
// static
// pthread trampoline for the socket reader thread.
void *OMXClient::ThreadWrapper(void *me) {
    ((OMXClient *)me)->threadEntry();

    return NULL;
}

// Reader loop: receives omx_messages off the socket and dispatches them
// until the peer hangs up (recv <= 0) or a DISCONNECTED message arrives,
// then closes the socket and marks the client disconnected.
void OMXClient::threadEntry() {
    bool done = false;
    while (!done) {
        omx_message msg;
        ssize_t n = recv(mSock, &msg, sizeof(msg), 0);

        if (n <= 0) {
            break;
        }

        done = onOMXMessage(msg);
    }

    Mutex::Autolock autoLock(mLock);
    close(mSock);
    mSock = -1;
}
+#endif
+
// Asks the component to fill |buffer| with output data.
// NOTE(review): in the binder build the result of mOMX->fill_buffer is
// discarded and OK is returned unconditionally — confirm whether the call
// can fail and should be propagated.
status_t OMXClient::fillBuffer(IOMX::node_id node, IOMX::buffer_id buffer) {
#if !IOMX_USES_SOCKETS
    mOMX->fill_buffer(node, buffer);
#else
    if (mSock < 0) {
        return UNKNOWN_ERROR;
    }

    omx_message msg;
    msg.type = omx_message::FILL_BUFFER;
    msg.u.buffer_data.node = node;
    msg.u.buffer_data.buffer = buffer;

    ssize_t n = send(mSock, &msg, sizeof(msg), 0);
    assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
#endif

    return OK;
}
+
// Hands |buffer| (holding range_length bytes of input at range_offset)
// to the component for decoding.
// NOTE(review): as with fillBuffer, the binder-path result is discarded.
status_t OMXClient::emptyBuffer(
        IOMX::node_id node, IOMX::buffer_id buffer,
        OMX_U32 range_offset, OMX_U32 range_length,
        OMX_U32 flags, OMX_TICKS timestamp) {
#if !IOMX_USES_SOCKETS
    mOMX->empty_buffer(
            node, buffer, range_offset, range_length, flags, timestamp);
#else
    if (mSock < 0) {
        return UNKNOWN_ERROR;
    }

    // XXX I don't like all this copying...

    omx_message msg;
    msg.type = omx_message::EMPTY_BUFFER;
    msg.u.extended_buffer_data.node = node;
    msg.u.extended_buffer_data.buffer = buffer;
    msg.u.extended_buffer_data.range_offset = range_offset;
    msg.u.extended_buffer_data.range_length = range_length;
    msg.u.extended_buffer_data.flags = flags;
    msg.u.extended_buffer_data.timestamp = timestamp;

    ssize_t n = send(mSock, &msg, sizeof(msg), 0);
    assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
#endif

    return OK;
}
+
// Issues an OMX command (state change, flush, ...) to |node|. The binder
// path returns the service's status; the socket path only confirms the
// message was queued.
status_t OMXClient::send_command(
        IOMX::node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) {
#if !IOMX_USES_SOCKETS
    return mOMX->send_command(node, cmd, param);
#else
    if (mSock < 0) {
        return UNKNOWN_ERROR;
    }

    omx_message msg;
    msg.type = omx_message::SEND_COMMAND;
    msg.u.send_command_data.node = node;
    msg.u.send_command_data.cmd = cmd;
    msg.u.send_command_data.param = param;

    ssize_t n = send(mSock, &msg, sizeof(msg), 0);
    assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
#endif

    return OK;
}
+
+status_t OMXClient::registerObserver(
+ IOMX::node_id node, OMXObserver *observer) {
+ Mutex::Autolock autoLock(&mLock);
+
+ ssize_t index = mObservers.indexOfKey(node);
+ if (index >= 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ mObservers.add(node, observer);
+ observer->start();
+
+#if !IOMX_USES_SOCKETS
+ mOMX->observe_node(node, mReflector);
+#endif
+
+ return OK;
+}
+
// Stops and forgets the observer registered for |node|. Asserts that one
// exists; the following guard is a defensive no-op for builds where
// asserts are disabled.
void OMXClient::unregisterObserver(IOMX::node_id node) {
    Mutex::Autolock autoLock(mLock);

    ssize_t index = mObservers.indexOfKey(node);
    assert(index >= 0);

    if (index < 0) {
        return;
    }

    // stop() joins the observer's delivery thread before we drop it.
    OMXObserver *observer = mObservers.valueAt(index);
    observer->stop();
    mObservers.removeItemsAt(index);
}
+
// Logs an incoming message and forwards it to the observer registered for
// the originating node. Returns true when the reader loop should exit.
// NOTE(review): the node id is always read through msg.u.buffer_data.node,
// even for EVENT/extended messages — this relies on 'node' occupying a
// common initial position in every union arm; confirm against omx_message.
bool OMXClient::onOMXMessage(const omx_message &msg) {
    bool done = false;

    switch (msg.type) {
        case omx_message::EVENT:
        {
            LOGV("OnEvent node:%p event:%d data1:%ld data2:%ld",
                 msg.u.event_data.node,
                 msg.u.event_data.event,
                 msg.u.event_data.data1,
                 msg.u.event_data.data2);

            break;
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            LOGV("FillBufferDone %p", msg.u.extended_buffer_data.buffer);
            break;
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            LOGV("EmptyBufferDone %p", msg.u.buffer_data.buffer);
            break;
        }

#if IOMX_USES_SOCKETS
        case omx_message::DISCONNECTED:
        {
            LOGV("Disconnected");
            done = true;
            break;
        }
#endif

        default:
            LOGE("received unknown omx_message type %d", msg.type);
            break;
    }

    Mutex::Autolock autoLock(mLock);
    ssize_t index = mObservers.indexOfKey(msg.u.buffer_data.node);

    if (index >= 0) {
        mObservers.editValueAt(index)->postMessage(msg);
    }

    return done;
}
+
+////////////////////////////////////////////////////////////////////////////////
+
// Lifecycle is driven by start()/stop(); nothing to do here.
OMXObserver::OMXObserver() {
}

OMXObserver::~OMXObserver() {
}
+
// Spawns the joinable thread that delivers queued messages to
// onOMXMessage; joined again by stop().
void OMXObserver::start() {
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

    int err = pthread_create(&mThread, &attr, ThreadWrapper, this);
    assert(err == 0);

    pthread_attr_destroy(&attr);
}
+
// Wakes the delivery thread with a QUIT_OBSERVER poison message and joins
// it; messages already queued ahead of it are still delivered.
void OMXObserver::stop() {
    omx_message msg;
    msg.type = omx_message::QUIT_OBSERVER;
    postMessage(msg);

    void *dummy;
    pthread_join(mThread, &dummy);
}
+
// Enqueues |msg| (copied) for the delivery thread and wakes it.
void OMXObserver::postMessage(const omx_message &msg) {
    Mutex::Autolock autoLock(mLock);
    mQueue.push_back(msg);
    mQueueNotEmpty.signal();
}
+
// static
// pthread trampoline for the delivery thread.
void *OMXObserver::ThreadWrapper(void *me) {
    static_cast<OMXObserver *>(me)->threadEntry();

    return NULL;
}
+
+void OMXObserver::threadEntry() {
+ for (;;) {
+ omx_message msg;
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ while (mQueue.empty()) {
+ mQueueNotEmpty.wait(mLock);
+ }
+
+ msg = *mQueue.begin();
+ mQueue.erase(mQueue.begin());
+ }
+
+ if (msg.type == omx_message::QUIT_OBSERVER) {
+ break;
+ }
+
+ onOMXMessage(msg);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
// The reflector relays binder callbacks to its owning client.
OMXClientReflector::OMXClientReflector(OMXClient *client)
    : mClient(client) {
}

// Forwards |msg| to the client unless reset() has severed the link.
void OMXClientReflector::on_message(const omx_message &msg) {
    if (mClient != NULL) {
        mClient->onOMXMessage(msg);
    }
}

// Detaches from the client so late callbacks after disconnect are dropped.
void OMXClientReflector::reset() {
    mClient = NULL;
}
+
+} // namespace android
diff --git a/media/libstagefright/OMXDecoder.cpp b/media/libstagefright/OMXDecoder.cpp
new file mode 100644
index 0000000..c059a9d
--- /dev/null
+++ b/media/libstagefright/OMXDecoder.cpp
@@ -0,0 +1,1329 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXDecoder"
+#include <utils/Log.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <OMX_Component.h>
+
+#include <media/stagefright/ESDS.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXDecoder.h>
+
+namespace android {
+
// MediaBuffer backed by shared memory belonging to an OMX component.
// Remembers the component's buffer id and pins the IMemory so the storage
// outlives this wrapper.
class OMXMediaBuffer : public MediaBuffer {
public:
    OMXMediaBuffer(IOMX::buffer_id buffer_id, const sp<IMemory> &mem)
        : MediaBuffer(mem->pointer(),
                      mem->size()),
          mBufferID(buffer_id),
          mMem(mem) {
    }

    IOMX::buffer_id buffer_id() const { return mBufferID; }

private:
    IOMX::buffer_id mBufferID;
    sp<IMemory> mMem;

    // Buffer ids are tied to component state; copying makes no sense.
    OMXMediaBuffer(const OMXMediaBuffer &);
    OMXMediaBuffer &operator=(const OMXMediaBuffer &);
};
+
// Maps a mime type to an OMX component name.
struct CodecInfo {
    const char *mime;
    const char *codec;
};

// Candidate components per mime type, in preference order: hardware
// (OMX.qcom.*) entries are listed before the software (OMX.PV.*) ones, so
// GetCodec tries hardware first.
static const CodecInfo kDecoderInfo[] = {
    { "audio/mpeg", "OMX.PV.mp3dec" },
    { "audio/3gpp", "OMX.PV.amrdec" },
    { "audio/mp4a-latm", "OMX.PV.aacdec" },
    { "video/mp4v-es", "OMX.qcom.video.decoder.mpeg4" },
    { "video/mp4v-es", "OMX.PV.mpeg4dec" },
    { "video/3gpp", "OMX.qcom.video.decoder.h263" },
    { "video/3gpp", "OMX.PV.h263dec" },
    { "video/avc", "OMX.qcom.video.decoder.avc" },
    { "video/avc", "OMX.PV.avcdec" },
};

static const CodecInfo kEncoderInfo[] = {
    { "audio/3gpp", "OMX.PV.amrencnb" },
    { "audio/mp4a-latm", "OMX.PV.aacenc" },
    { "video/mp4v-es", "OMX.qcom.video.encoder.mpeg4" },
    { "video/mp4v-es", "OMX.PV.mpeg4enc" },
    { "video/3gpp", "OMX.qcom.video.encoder.h263" },
    { "video/3gpp", "OMX.PV.h263enc" },
    { "video/avc", "OMX.PV.avcenc" },
};
+
+static const char *GetCodec(const CodecInfo *info, size_t numInfos,
+ const char *mime, int index) {
+ assert(index >= 0);
+ for(size_t i = 0; i < numInfos; ++i) {
+ if (!strcasecmp(mime, info[i].mime)) {
+ if (index == 0) {
+ return info[i].codec;
+ }
+
+ --index;
+ }
+ }
+
+ return NULL;
+}
+
+// static
+OMXDecoder *OMXDecoder::Create(OMXClient *client, const sp<MetaData> &meta) {
+ const char *mime;
+ bool success = meta->findCString(kKeyMIMEType, &mime);
+ assert(success);
+
+ sp<IOMX> omx = client->interface();
+
+ const char *codec = NULL;
+ IOMX::node_id node = 0;
+ for (int index = 0;; ++index) {
+ codec = GetCodec(
+ kDecoderInfo, sizeof(kDecoderInfo) / sizeof(kDecoderInfo[0]),
+ mime, index);
+
+ if (!codec) {
+ return NULL;
+ }
+
+ LOGI("Attempting to allocate OMX node '%s'", codec);
+
+ status_t err = omx->allocate_node(codec, &node);
+ if (err == OK) {
+ break;
+ }
+ }
+
+ OMXDecoder *decoder = new OMXDecoder(client, node, mime, codec);
+
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyESDS, &type, &data, &size)) {
+ ESDS esds((const char *)data, size);
+ assert(esds.InitCheck() == OK);
+
+ const void *codec_specific_data;
+ size_t codec_specific_data_size;
+ esds.getCodecSpecificInfo(
+ &codec_specific_data, &codec_specific_data_size);
+
+ printf("found codec specific data of size %d\n",
+ codec_specific_data_size);
+
+ decoder->addCodecSpecificData(
+ codec_specific_data, codec_specific_data_size);
+ } else if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+ printf("found avcc of size %d\n", size);
+
+ const uint8_t *ptr = (const uint8_t *)data + 6;
+ size -= 6;
+ while (size >= 2) {
+ size_t length = ptr[0] << 8 | ptr[1];
+
+ ptr += 2;
+ size -= 2;
+
+ // printf("length = %d, size = %d\n", length, size);
+
+ assert(size >= length);
+
+ decoder->addCodecSpecificData(ptr, length);
+
+ ptr += length;
+ size -= length;
+
+ if (size <= 1) {
+ break;
+ }
+
+ ptr++; // XXX skip trailing 0x01 byte???
+ --size;
+ }
+ }
+
+ return decoder;
+}
+
+// static
+OMXDecoder *OMXDecoder::CreateEncoder(
+ OMXClient *client, const sp<MetaData> &meta) {
+ const char *mime;
+ bool success = meta->findCString(kKeyMIMEType, &mime);
+ assert(success);
+
+ sp<IOMX> omx = client->interface();
+
+ const char *codec = NULL;
+ IOMX::node_id node = 0;
+ for (int index = 0;; ++index) {
+ codec = GetCodec(
+ kEncoderInfo, sizeof(kEncoderInfo) / sizeof(kEncoderInfo[0]),
+ mime, index);
+
+ if (!codec) {
+ return NULL;
+ }
+
+ LOGI("Attempting to allocate OMX node '%s'", codec);
+
+ status_t err = omx->allocate_node(codec, &node);
+ if (err == OK) {
+ break;
+ }
+ }
+
+ OMXDecoder *encoder = new OMXDecoder(client, node, mime, codec);
+
+ return encoder;
+}
+
// Wraps an already-allocated OMX node. Registers with the client for the
// node's messages and prepares one buffer list per port (input, output).
OMXDecoder::OMXDecoder(OMXClient *client, IOMX::node_id node,
                       const char *mime, const char *codec)
    : mClient(client),
      mOMX(mClient->interface()),
      mNode(node),
      mComponentName(strdup(codec)),
      mIsMP3(!strcasecmp(mime, "audio/mpeg")),
      mSource(NULL),
      mCodecSpecificDataIterator(mCodecSpecificData.begin()),
      mState(OMX_StateLoaded),
      // Both ports (bits [1:0] and [3:2]) start out active.
      mPortStatusMask(kPortStatusActive << 2 | kPortStatusActive),
      mShutdownInitiated(false),
      mDealer(new MemoryDealer(3 * 1024 * 1024)),
      mSeeking(false),
      mStarted(false),
      mErrorCondition(OK),
      mReachedEndOfInput(false) {
    mClient->registerObserver(mNode, this);

    mBuffers.push(); // input buffers
    mBuffers.push(); // output buffers
}
+
// Stops the decoder if still running, releases the codec-specific data
// blobs, unregisters from the client and frees the OMX node.
OMXDecoder::~OMXDecoder() {
    if (mStarted) {
        stop();
    }

    for (List<CodecSpecificData>::iterator it = mCodecSpecificData.begin();
         it != mCodecSpecificData.end(); ++it) {
        free((*it).data);
    }
    mCodecSpecificData.clear();

    mClient->unregisterObserver(mNode);

    status_t err = mOMX->free_node(mNode);
    assert(err == OK);
    mNode = 0;

    free(mComponentName);
    mComponentName = NULL;
}
+
+void OMXDecoder::setSource(MediaSource *source) {
+ Mutex::Autolock autoLock(mLock);
+
+ assert(mSource == NULL);
+
+ mSource = source;
+ setup();
+}
+
// Starts the upstream source and kicks off decoding. The MetaData
// parameter is unused.
status_t OMXDecoder::start(MetaData *) {
    assert(!mStarted);

    // mDealer->dump("Decoder Dealer");

    // The qcom AVC decoder needs its input NAL-framed; ask the source
    // for that framing.
    sp<MetaData> params = new MetaData;
    if (!strcmp(mComponentName, "OMX.qcom.video.decoder.avc")) {
        params->setInt32(kKeyNeedsNALFraming, true);
    }

    status_t err = mSource->start(params.get());

    if (err != OK) {
        return err;
    }

    postStart();

    mStarted = true;

    return OK;
}
+
// Shuts the node down: initiates the OMX shutdown sequence, returns all
// output buffers we still hold, busy-polls (up to ~1s) for the node to
// reach OMX_StateLoaded, then stops the source and resets decoder state.
status_t OMXDecoder::stop() {
    assert(mStarted);

    LOGI("Initiating OMX Node shutdown, busy polling.");
    initiateShutdown();

    // Important: initiateShutdown must be called first, _then_ release
    // buffers we're holding onto.
    while (!mOutputBuffers.empty()) {
        MediaBuffer *buffer = *mOutputBuffers.begin();
        mOutputBuffers.erase(mOutputBuffers.begin());

        LOGV("releasing buffer %p.", buffer->data());

        buffer->release();
        buffer = NULL;
    }

    // Poll for the state transition; 10 x 100ms before giving up.
    int attempt = 1;
    while (mState != OMX_StateLoaded && attempt < 10) {
        usleep(100000);

        ++attempt;
    }

    if (mState != OMX_StateLoaded) {
        LOGE("!!! OMX Node '%s' did NOT shutdown cleanly !!!", mComponentName);
    } else {
        LOGI("OMX Node '%s' has shutdown cleanly.", mComponentName);
    }

    mSource->stop();

    // Reset per-run state so the decoder can be started again.
    mCodecSpecificDataIterator = mCodecSpecificData.begin();
    mShutdownInitiated = false;
    mSeeking = false;
    mStarted = false;
    mErrorCondition = OK;
    mReachedEndOfInput = false;

    return OK;
}
+
+sp<MetaData> OMXDecoder::getFormat() {
+ return mOutputFormat;
+}
+
+status_t OMXDecoder::read(
+ MediaBuffer **out, const ReadOptions *options) {
+ assert(mStarted);
+
+ *out = NULL;
+
+ Mutex::Autolock autoLock(mLock);
+
+ if (mErrorCondition != OK && mErrorCondition != ERROR_END_OF_STREAM) {
+ // Errors are sticky.
+ return mErrorCondition;
+ }
+
+ int64_t seekTimeUs;
+ if (options && options->getSeekTo(&seekTimeUs)) {
+ LOGI("[%s] seeking to %lld us", mComponentName, seekTimeUs);
+
+ mErrorCondition = OK;
+ mReachedEndOfInput = false;
+
+ setPortStatus(kPortIndexInput, kPortStatusFlushing);
+ setPortStatus(kPortIndexOutput, kPortStatusFlushing);
+
+ mSeeking = true;
+ mSeekTimeUs = seekTimeUs;
+
+ while (!mOutputBuffers.empty()) {
+ OMXMediaBuffer *buffer =
+ static_cast<OMXMediaBuffer *>(*mOutputBuffers.begin());
+
+ // We could have used buffer->release() instead, but we're
+ // holding the lock and signalBufferReturned attempts to acquire
+ // the lock.
+ buffer->claim();
+ mBuffers.editItemAt(
+ kPortIndexOutput).push_back(buffer->buffer_id());
+ buffer = NULL;
+
+ mOutputBuffers.erase(mOutputBuffers.begin());
+ }
+
+ status_t err = mOMX->send_command(mNode, OMX_CommandFlush, -1);
+ assert(err == OK);
+
+ // Once flushing is completed buffers will again be scheduled to be
+ // filled/emptied.
+ }
+
+ while (mErrorCondition == OK && mOutputBuffers.empty()) {
+ mOutputBufferAvailable.wait(mLock);
+ }
+
+ if (!mOutputBuffers.empty()) {
+ MediaBuffer *buffer = *mOutputBuffers.begin();
+ mOutputBuffers.erase(mOutputBuffers.begin());
+
+ *out = buffer;
+
+ return OK;
+ } else {
+ assert(mErrorCondition != OK);
+ return mErrorCondition;
+ }
+}
+
+void OMXDecoder::addCodecSpecificData(const void *data, size_t size) {
+ CodecSpecificData specific;
+ specific.data = malloc(size);
+ memcpy(specific.data, data, size);
+ specific.size = size;
+
+ mCodecSpecificData.push_back(specific);
+ mCodecSpecificDataIterator = mCodecSpecificData.begin();
+}
+
+void OMXDecoder::onOMXMessage(const omx_message &msg) {
+ Mutex::Autolock autoLock(mLock);
+
+ switch (msg.type) {
+ case omx_message::START:
+ {
+ onStart();
+ break;
+ }
+
+ case omx_message::EVENT:
+ {
+ onEvent(msg.u.event_data.event, msg.u.event_data.data1,
+ msg.u.event_data.data2);
+ break;
+ }
+
+ case omx_message::EMPTY_BUFFER_DONE:
+ {
+ onEmptyBufferDone(msg.u.buffer_data.buffer);
+ break;
+ }
+
+ case omx_message::FILL_BUFFER_DONE:
+ case omx_message::INITIAL_FILL_BUFFER:
+ {
+ onFillBufferDone(msg);
+ break;
+ }
+
+ default:
+ LOGE("received unknown omx_message type %d", msg.type);
+ break;
+ }
+}
+
+void OMXDecoder::setAMRFormat() {
+ OMX_AUDIO_PARAM_AMRTYPE def;
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nPortIndex = kPortIndexInput;
+
+ status_t err =
+ mOMX->get_parameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+
+ assert(err == NO_ERROR);
+
+ def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
+ def.eAMRBandMode = OMX_AUDIO_AMRBandModeNB0;
+
+ err = mOMX->set_parameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+ assert(err == NO_ERROR);
+}
+
+void OMXDecoder::setAACFormat() {
+ OMX_AUDIO_PARAM_AACPROFILETYPE def;
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nPortIndex = kPortIndexInput;
+
+ status_t err =
+ mOMX->get_parameter(mNode, OMX_IndexParamAudioAac, &def, sizeof(def));
+ assert(err == NO_ERROR);
+
+ def.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
+
+ err = mOMX->set_parameter(mNode, OMX_IndexParamAudioAac, &def, sizeof(def));
+ assert(err == NO_ERROR);
+}
+
+void OMXDecoder::setVideoOutputFormat(OMX_U32 width, OMX_U32 height) {
+ LOGI("setVideoOutputFormat width=%ld, height=%ld", width, height);
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ bool is_encoder = strstr(mComponentName, ".encoder.") != NULL; // XXX
+
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nPortIndex = is_encoder ? kPortIndexOutput : kPortIndexInput;
+
+ status_t err = mOMX->get_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ assert(err == NO_ERROR);
+
+#if 1
+ // XXX Need a (much) better heuristic to compute input buffer sizes.
+ const size_t X = 64 * 1024;
+ if (def.nBufferSize < X) {
+ def.nBufferSize = X;
+ }
+#endif
+
+ assert(def.eDomain == OMX_PortDomainVideo);
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+ // video_def.eCompressionFormat = OMX_VIDEO_CodingAVC;
+ video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+ err = mOMX->set_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ assert(err == NO_ERROR);
+
+ ////////////////////////////////////////////////////////////////////////////
+
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nPortIndex = is_encoder ? kPortIndexInput : kPortIndexOutput;
+
+ err = mOMX->get_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ assert(err == NO_ERROR);
+
+ assert(def.eDomain == OMX_PortDomainVideo);
+
+ def.nBufferSize =
+ (((width + 15) & -16) * ((height + 15) & -16) * 3) / 2; // YUV420
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+ video_def->nStride = width;
+ // video_def->nSliceHeight = height;
+ video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
+// video_def->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+
+ err = mOMX->set_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ assert(err == NO_ERROR);
+}
+
+void OMXDecoder::setup() {
+ const sp<MetaData> &meta = mSource->getFormat();
+
+ const char *mime;
+ bool success = meta->findCString(kKeyMIMEType, &mime);
+ assert(success);
+
+ if (!strcasecmp(mime, "audio/3gpp")) {
+ setAMRFormat();
+ } else if (!strcasecmp(mime, "audio/mp4a-latm")) {
+ setAACFormat();
+ } else if (!strncasecmp(mime, "video/", 6)) {
+ int32_t width, height;
+ bool success = meta->findInt32(kKeyWidth, &width);
+ success = success && meta->findInt32(kKeyHeight, &height);
+ assert(success);
+
+ setVideoOutputFormat(width, height);
+ }
+
+ // dumpPortDefinition(0);
+ // dumpPortDefinition(1);
+
+ mOutputFormat = new MetaData;
+ mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->get_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ assert(err == NO_ERROR);
+
+ switch (def.eDomain) {
+ case OMX_PortDomainAudio:
+ {
+ OMX_AUDIO_PORTDEFINITIONTYPE *audio_def = &def.format.audio;
+
+ assert(audio_def->eEncoding == OMX_AUDIO_CodingPCM);
+
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ params.nSize = sizeof(params);
+ params.nVersion.s.nVersionMajor = 1;
+ params.nVersion.s.nVersionMinor = 1;
+ params.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->get_parameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ assert(err == OK);
+
+ assert(params.eNumData == OMX_NumericalDataSigned);
+ assert(params.nBitPerSample == 16);
+ assert(params.ePCMMode == OMX_AUDIO_PCMModeLinear);
+
+ int32_t numChannels, sampleRate;
+ meta->findInt32(kKeyChannelCount, &numChannels);
+ meta->findInt32(kKeySampleRate, &sampleRate);
+
+ mOutputFormat->setCString(kKeyMIMEType, "audio/raw");
+ mOutputFormat->setInt32(kKeyChannelCount, numChannels);
+ mOutputFormat->setInt32(kKeySampleRate, sampleRate);
+ break;
+ }
+
+ case OMX_PortDomainVideo:
+ {
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ if (video_def->eCompressionFormat == OMX_VIDEO_CodingUnused) {
+ mOutputFormat->setCString(kKeyMIMEType, "video/raw");
+ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingMPEG4) {
+ mOutputFormat->setCString(kKeyMIMEType, "video/mp4v-es");
+ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingH263) {
+ mOutputFormat->setCString(kKeyMIMEType, "video/3gpp");
+ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingAVC) {
+ mOutputFormat->setCString(kKeyMIMEType, "video/avc");
+ } else {
+ assert(!"Unknown compression format.");
+ }
+
+ if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
+ // This component appears to be lying to me.
+ mOutputFormat->setInt32(
+ kKeyWidth, (video_def->nFrameWidth + 15) & -16);
+ mOutputFormat->setInt32(
+ kKeyHeight, (video_def->nFrameHeight + 15) & -16);
+ } else {
+ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+ }
+
+ mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
+ break;
+ }
+
+ default:
+ {
+ assert(!"should not be here, neither audio nor video.");
+ break;
+ }
+ }
+}
+
+void OMXDecoder::onStart() {
+ bool needs_qcom_hack =
+ !strncmp(mComponentName, "OMX.qcom.video.", 15);
+
+ if (!needs_qcom_hack) {
+ status_t err =
+ mOMX->send_command(mNode, OMX_CommandStateSet, OMX_StateIdle);
+ assert(err == NO_ERROR);
+ }
+
+ allocateBuffers(kPortIndexInput);
+ allocateBuffers(kPortIndexOutput);
+
+ if (needs_qcom_hack) {
+ // XXX this should happen before AllocateBuffers, but qcom's
+ // h264 vdec disagrees.
+ status_t err =
+ mOMX->send_command(mNode, OMX_CommandStateSet, OMX_StateIdle);
+ assert(err == NO_ERROR);
+ }
+}
+
+void OMXDecoder::allocateBuffers(OMX_U32 port_index) {
+ assert(mBuffers[port_index].empty());
+
+ OMX_U32 num_buffers;
+ OMX_U32 buffer_size;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nVersion.s.nRevision = 0;
+ def.nVersion.s.nStep = 0;
+ def.nPortIndex = port_index;
+
+ status_t err = mOMX->get_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ assert(err == NO_ERROR);
+
+ num_buffers = def.nBufferCountActual;
+ buffer_size = def.nBufferSize;
+
+ LOGV("[%s] port %ld: allocating %ld buffers of size %ld each\n",
+ mComponentName, port_index, num_buffers, buffer_size);
+
+ for (OMX_U32 i = 0; i < num_buffers; ++i) {
+ sp<IMemory> mem = mDealer->allocate(buffer_size);
+ assert(mem.get() != NULL);
+
+ IOMX::buffer_id buffer;
+ status_t err;
+
+ if (port_index == kPortIndexInput
+ && !strncmp(mComponentName, "OMX.qcom.video.encoder.", 23)) {
+ // qcom's H.263 encoder appears to want to allocate its own input
+ // buffers.
+ err = mOMX->allocate_buffer_with_backup(mNode, port_index, mem, &buffer);
+ if (err != OK) {
+ LOGE("[%s] allocate_buffer_with_backup failed with error %d",
+ mComponentName, err);
+ }
+ } else if (port_index == kPortIndexOutput
+ && !strncmp(mComponentName, "OMX.qcom.video.decoder.", 23)) {
+#if 1
+ err = mOMX->allocate_buffer_with_backup(mNode, port_index, mem, &buffer);
+#else
+ // XXX This is fine as long as we are either running the player
+ // inside the media server process or we are using the
+ // QComHardwareRenderer to output the frames.
+ err = mOMX->allocate_buffer(mNode, port_index, buffer_size, &buffer);
+#endif
+ if (err != OK) {
+ LOGE("[%s] allocate_buffer_with_backup failed with error %d",
+ mComponentName, err);
+ }
+ } else {
+ err = mOMX->use_buffer(mNode, port_index, mem, &buffer);
+ if (err != OK) {
+ LOGE("[%s] use_buffer failed with error %d",
+ mComponentName, err);
+ }
+ }
+ assert(err == OK);
+
+ LOGV("allocated %s buffer %p.",
+ port_index == kPortIndexInput ? "INPUT" : "OUTPUT",
+ buffer);
+
+ mBuffers.editItemAt(port_index).push_back(buffer);
+ mBufferMap.add(buffer, mem);
+
+ if (port_index == kPortIndexOutput) {
+ OMXMediaBuffer *media_buffer = new OMXMediaBuffer(buffer, mem);
+ media_buffer->setObserver(this);
+
+ mMediaBufferMap.add(buffer, media_buffer);
+ }
+ }
+
+ LOGV("allocate %s buffers done.",
+ port_index == kPortIndexInput ? "INPUT" : "OUTPUT");
+}
+
+void OMXDecoder::onEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ LOGV("[%s] onEvent event=%d, data1=%ld, data2=%ld",
+ mComponentName, event, data1, data2);
+
+ switch (event) {
+ case OMX_EventCmdComplete: {
+ onEventCmdComplete(
+ static_cast<OMX_COMMANDTYPE>(data1), data2);
+
+ break;
+ }
+
+ case OMX_EventPortSettingsChanged: {
+ onEventPortSettingsChanged(data1);
+ break;
+ }
+
+ case OMX_EventBufferFlag: {
+ // initiateShutdown();
+ break;
+ }
+
+ default:
+ break;
+ }
+}
+
+void OMXDecoder::onEventCmdComplete(OMX_COMMANDTYPE type, OMX_U32 data) {
+ switch (type) {
+ case OMX_CommandStateSet: {
+ OMX_STATETYPE state = static_cast<OMX_STATETYPE>(data);
+ onStateChanged(state);
+ break;
+ }
+
+ case OMX_CommandPortDisable: {
+ OMX_U32 port_index = data;
+ assert(getPortStatus(port_index) == kPortStatusDisabled);
+
+ status_t err =
+ mOMX->send_command(mNode, OMX_CommandPortEnable, port_index);
+
+ allocateBuffers(port_index);
+
+ break;
+ }
+
+ case OMX_CommandPortEnable: {
+ OMX_U32 port_index = data;
+ assert(getPortStatus(port_index) ==kPortStatusDisabled);
+ setPortStatus(port_index, kPortStatusActive);
+
+ assert(port_index == kPortIndexOutput);
+
+ BufferList *obuffers = &mBuffers.editItemAt(kPortIndexOutput);
+ while (!obuffers->empty()) {
+ IOMX::buffer_id buffer = *obuffers->begin();
+ obuffers->erase(obuffers->begin());
+
+ status_t err = mClient->fillBuffer(mNode, buffer);
+ assert(err == NO_ERROR);
+ }
+
+ break;
+ }
+
+ case OMX_CommandFlush: {
+ OMX_U32 port_index = data;
+ LOGV("Port %ld flush complete.", port_index);
+ assert(getPortStatus(port_index) == kPortStatusFlushing);
+
+ setPortStatus(port_index, kPortStatusActive);
+ BufferList *buffers = &mBuffers.editItemAt(port_index);
+ while (!buffers->empty()) {
+ IOMX::buffer_id buffer = *buffers->begin();
+ buffers->erase(buffers->begin());
+
+ if (port_index == kPortIndexInput) {
+ postEmptyBufferDone(buffer);
+ } else {
+ postInitialFillBuffer(buffer);
+ }
+ }
+ break;
+ }
+
+ default:
+ break;
+ }
+}
+
+void OMXDecoder::onEventPortSettingsChanged(OMX_U32 port_index) {
+ assert(getPortStatus(port_index) == kPortStatusActive);
+ setPortStatus(port_index, kPortStatusDisabled);
+
+ status_t err =
+ mOMX->send_command(mNode, OMX_CommandPortDisable, port_index);
+ assert(err == NO_ERROR);
+}
+
+void OMXDecoder::onStateChanged(OMX_STATETYPE to) {
+ if (mState == OMX_StateLoaded) {
+ assert(to == OMX_StateIdle);
+
+ mState = to;
+
+ status_t err =
+ mOMX->send_command(mNode, OMX_CommandStateSet, OMX_StateExecuting);
+ assert(err == NO_ERROR);
+ } else if (mState == OMX_StateIdle) {
+ if (to == OMX_StateExecuting) {
+ mState = to;
+
+ BufferList *ibuffers = &mBuffers.editItemAt(kPortIndexInput);
+ while (!ibuffers->empty()) {
+ IOMX::buffer_id buffer = *ibuffers->begin();
+ ibuffers->erase(ibuffers->begin());
+
+ postEmptyBufferDone(buffer);
+ }
+
+ BufferList *obuffers = &mBuffers.editItemAt(kPortIndexOutput);
+ while (!obuffers->empty()) {
+ IOMX::buffer_id buffer = *obuffers->begin();
+ obuffers->erase(obuffers->begin());
+
+ postInitialFillBuffer(buffer);
+ }
+ } else {
+ assert(to == OMX_StateLoaded);
+
+ mState = to;
+
+ setPortStatus(kPortIndexInput, kPortStatusActive);
+ setPortStatus(kPortIndexOutput, kPortStatusActive);
+ }
+ } else if (mState == OMX_StateExecuting) {
+ assert(to == OMX_StateIdle);
+
+ mState = to;
+
+ LOGV("Executing->Idle complete, initiating Idle->Loaded");
+ status_t err =
+ mClient->send_command(mNode, OMX_CommandStateSet, OMX_StateLoaded);
+ assert(err == NO_ERROR);
+
+ BufferList *ibuffers = &mBuffers.editItemAt(kPortIndexInput);
+ for (BufferList::iterator it = ibuffers->begin();
+ it != ibuffers->end(); ++it) {
+ freeInputBuffer(*it);
+ }
+ ibuffers->clear();
+
+ BufferList *obuffers = &mBuffers.editItemAt(kPortIndexOutput);
+ for (BufferList::iterator it = obuffers->begin();
+ it != obuffers->end(); ++it) {
+ freeOutputBuffer(*it);
+ }
+ obuffers->clear();
+ }
+}
+
+void OMXDecoder::initiateShutdown() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mShutdownInitiated) {
+ return;
+ }
+
+ if (mState == OMX_StateLoaded) {
+ return;
+ }
+
+ assert(mState == OMX_StateExecuting);
+
+ mShutdownInitiated = true;
+
+ status_t err =
+ mClient->send_command(mNode, OMX_CommandStateSet, OMX_StateIdle);
+ assert(err == NO_ERROR);
+
+ setPortStatus(kPortIndexInput, kPortStatusShutdown);
+ setPortStatus(kPortIndexOutput, kPortStatusShutdown);
+}
+
+void OMXDecoder::setPortStatus(OMX_U32 port_index, PortStatus status) {
+ int shift = 2 * port_index;
+
+ mPortStatusMask &= ~(3 << shift);
+ mPortStatusMask |= status << shift;
+}
+
+OMXDecoder::PortStatus OMXDecoder::getPortStatus(
+ OMX_U32 port_index) const {
+ int shift = 2 * port_index;
+
+ return static_cast<PortStatus>((mPortStatusMask >> shift) & 3);
+}
+
+void OMXDecoder::onEmptyBufferDone(IOMX::buffer_id buffer) {
+ LOGV("[%s] onEmptyBufferDone (%p)", mComponentName, buffer);
+
+ status_t err;
+ switch (getPortStatus(kPortIndexInput)) {
+ case kPortStatusDisabled:
+ freeInputBuffer(buffer);
+ err = NO_ERROR;
+ break;
+
+ case kPortStatusShutdown:
+ LOGV("We're shutting down, enqueue INPUT buffer %p.", buffer);
+ mBuffers.editItemAt(kPortIndexInput).push_back(buffer);
+ err = NO_ERROR;
+ break;
+
+ case kPortStatusFlushing:
+ LOGV("We're currently flushing, enqueue INPUT buffer %p.", buffer);
+ mBuffers.editItemAt(kPortIndexInput).push_back(buffer);
+ err = NO_ERROR;
+ break;
+
+ default:
+ onRealEmptyBufferDone(buffer);
+ err = NO_ERROR;
+ break;
+ }
+ assert(err == NO_ERROR);
+}
+
+void OMXDecoder::onFillBufferDone(const omx_message &msg) {
+ IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
+
+ LOGV("[%s] onFillBufferDone (%p)", mComponentName, buffer);
+
+ status_t err;
+ switch (getPortStatus(kPortIndexOutput)) {
+ case kPortStatusDisabled:
+ freeOutputBuffer(buffer);
+ err = NO_ERROR;
+ break;
+ case kPortStatusShutdown:
+ LOGV("We're shutting down, enqueue OUTPUT buffer %p.", buffer);
+ mBuffers.editItemAt(kPortIndexOutput).push_back(buffer);
+ err = NO_ERROR;
+ break;
+
+ case kPortStatusFlushing:
+ LOGV("We're currently flushing, enqueue OUTPUT buffer %p.", buffer);
+ mBuffers.editItemAt(kPortIndexOutput).push_back(buffer);
+ err = NO_ERROR;
+ break;
+
+ default:
+ {
+ if (msg.type == omx_message::INITIAL_FILL_BUFFER) {
+ err = mClient->fillBuffer(mNode, buffer);
+ } else {
+ LOGV("[%s] Filled OUTPUT buffer %p, flags=0x%08lx.",
+ mComponentName, buffer, msg.u.extended_buffer_data.flags);
+
+ onRealFillBufferDone(msg);
+ err = NO_ERROR;
+ }
+ break;
+ }
+ }
+ assert(err == NO_ERROR);
+}
+
+void OMXDecoder::onRealEmptyBufferDone(IOMX::buffer_id buffer) {
+ if (mReachedEndOfInput) {
+ // We already sent the EOS notification.
+
+ mBuffers.editItemAt(kPortIndexInput).push_back(buffer);
+ return;
+ }
+
+ const sp<IMemory> mem = mBufferMap.valueFor(buffer);
+ assert(mem.get() != NULL);
+
+ static const uint8_t kNALStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
+
+ if (mCodecSpecificDataIterator != mCodecSpecificData.end()) {
+ List<CodecSpecificData>::iterator it = mCodecSpecificDataIterator;
+
+ size_t range_length = 0;
+
+ if (!strcmp(mComponentName, "OMX.qcom.video.decoder.avc")) {
+ assert((*mCodecSpecificDataIterator).size + 4 <= mem->size());
+
+ memcpy(mem->pointer(), kNALStartCode, 4);
+
+ memcpy((uint8_t *)mem->pointer() + 4, (*it).data, (*it).size);
+ range_length = (*it).size + 4;
+ } else {
+ assert((*mCodecSpecificDataIterator).size <= mem->size());
+
+ memcpy((uint8_t *)mem->pointer(), (*it).data, (*it).size);
+ range_length = (*it).size;
+ }
+
+ ++mCodecSpecificDataIterator;
+
+ status_t err = mClient->emptyBuffer(
+ mNode, buffer, 0, range_length,
+ OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
+ 0);
+
+ assert(err == NO_ERROR);
+
+ return;
+ }
+
+ LOGV("[%s] waiting for input data", mComponentName);
+
+ MediaBuffer *input_buffer;
+ for (;;) {
+ status_t err;
+
+ if (mSeeking) {
+ MediaSource::ReadOptions options;
+ options.setSeekTo(mSeekTimeUs);
+
+ mSeeking = false;
+
+ err = mSource->read(&input_buffer, &options);
+ } else {
+ err = mSource->read(&input_buffer);
+ }
+ assert((err == OK && input_buffer != NULL)
+ || (err != OK && input_buffer == NULL));
+
+ if (err == ERROR_END_OF_STREAM) {
+ LOGE("[%s] Reached end of stream.", mComponentName);
+ mReachedEndOfInput = true;
+ } else {
+ LOGV("[%s] got input data", mComponentName);
+ }
+
+ if (err != OK) {
+ status_t err2 = mClient->emptyBuffer(
+ mNode, buffer, 0, 0, OMX_BUFFERFLAG_EOS, 0);
+
+ assert(err2 == NO_ERROR);
+ return;
+ }
+
+ if (mSeeking) {
+ input_buffer->release();
+ input_buffer = NULL;
+
+ continue;
+ }
+
+ break;
+ }
+
+ const uint8_t *src_data =
+ (const uint8_t *)input_buffer->data() + input_buffer->range_offset();
+
+ size_t src_length = input_buffer->range_length();
+ if (src_length == 195840) {
+ // When feeding the output of the AVC decoder into the H263 encoder,
+ // buffer sizes mismatch if width % 16 != 0 || height % 16 != 0.
+ src_length = 194400; // XXX HACK
+ } else if (src_length == 115200) {
+ src_length = 114240; // XXX HACK
+ }
+
+ if (src_length > mem->size()) {
+ LOGE("src_length=%d > mem->size() = %d\n",
+ src_length, mem->size());
+ }
+
+ assert(src_length <= mem->size());
+ memcpy(mem->pointer(), src_data, src_length);
+
+ OMX_U32 flags = 0;
+ if (!mIsMP3) {
+ // Only mp3 audio data may be streamed, all other data is assumed
+ // to be fed into the decoder at frame boundaries.
+ flags |= OMX_BUFFERFLAG_ENDOFFRAME;
+ }
+
+ int32_t units, scale;
+ bool success =
+ input_buffer->meta_data()->findInt32(kKeyTimeUnits, &units);
+
+ success = success &&
+ input_buffer->meta_data()->findInt32(kKeyTimeScale, &scale);
+
+ OMX_TICKS timestamp = 0;
+
+ if (success) {
+ // XXX units should be microseconds but PV treats them as milliseconds.
+ timestamp = ((OMX_S64)units * 1000) / scale;
+ }
+
+ input_buffer->release();
+ input_buffer = NULL;
+
+ LOGV("[%s] Calling EmptyBuffer on buffer %p",
+ mComponentName, buffer);
+
+ status_t err2 = mClient->emptyBuffer(
+ mNode, buffer, 0, src_length, flags, timestamp);
+ assert(err2 == OK);
+}
+
+void OMXDecoder::onRealFillBufferDone(const omx_message &msg) {
+ OMXMediaBuffer *media_buffer =
+ mMediaBufferMap.valueFor(msg.u.extended_buffer_data.buffer);
+
+ media_buffer->set_range(
+ msg.u.extended_buffer_data.range_offset,
+ msg.u.extended_buffer_data.range_length);
+
+ media_buffer->add_ref();
+
+ media_buffer->meta_data()->clear();
+
+ media_buffer->meta_data()->setInt32(
+ kKeyTimeUnits, msg.u.extended_buffer_data.timestamp);
+ media_buffer->meta_data()->setInt32(kKeyTimeScale, 1000);
+
+ if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_SYNCFRAME) {
+ media_buffer->meta_data()->setInt32(kKeyIsSyncFrame, true);
+ }
+
+ media_buffer->meta_data()->setPointer(
+ kKeyPlatformPrivate,
+ msg.u.extended_buffer_data.platform_private);
+
+ if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_EOS) {
+ mErrorCondition = ERROR_END_OF_STREAM;
+ }
+
+ mOutputBuffers.push_back(media_buffer);
+ mOutputBufferAvailable.signal();
+}
+
+void OMXDecoder::signalBufferReturned(MediaBuffer *_buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMXMediaBuffer *media_buffer = static_cast<OMXMediaBuffer *>(_buffer);
+
+ IOMX::buffer_id buffer = media_buffer->buffer_id();
+
+ PortStatus outputStatus = getPortStatus(kPortIndexOutput);
+ if (outputStatus == kPortStatusShutdown
+ || outputStatus == kPortStatusFlushing) {
+ mBuffers.editItemAt(kPortIndexOutput).push_back(buffer);
+ } else {
+ LOGV("[%s] Calling FillBuffer on buffer %p.", mComponentName, buffer);
+
+ status_t err = mClient->fillBuffer(mNode, buffer);
+ assert(err == NO_ERROR);
+ }
+}
+
+void OMXDecoder::freeInputBuffer(IOMX::buffer_id buffer) {
+ LOGV("freeInputBuffer %p", buffer);
+
+ status_t err = mOMX->free_buffer(mNode, kPortIndexInput, buffer);
+ assert(err == NO_ERROR);
+ mBufferMap.removeItem(buffer);
+
+ LOGV("freeInputBuffer %p done", buffer);
+}
+
+void OMXDecoder::freeOutputBuffer(IOMX::buffer_id buffer) {
+ LOGV("freeOutputBuffer %p", buffer);
+
+ status_t err = mOMX->free_buffer(mNode, kPortIndexOutput, buffer);
+ assert(err == NO_ERROR);
+ mBufferMap.removeItem(buffer);
+
+ ssize_t index = mMediaBufferMap.indexOfKey(buffer);
+ assert(index >= 0);
+ MediaBuffer *mbuffer = mMediaBufferMap.editValueAt(index);
+ mMediaBufferMap.removeItemsAt(index);
+ mbuffer->setObserver(NULL);
+ mbuffer->release();
+ mbuffer = NULL;
+
+ LOGV("freeOutputBuffer %p done", buffer);
+}
+
+void OMXDecoder::dumpPortDefinition(OMX_U32 port_index) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 1;
+ def.nPortIndex = port_index;
+
+ status_t err = mOMX->get_parameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ assert(err == NO_ERROR);
+
+ LOGI("DumpPortDefinition on port %ld", port_index);
+ LOGI("nBufferCountActual = %ld, nBufferCountMin = %ld, nBufferSize = %ld",
+ def.nBufferCountActual, def.nBufferCountMin, def.nBufferSize);
+ switch (def.eDomain) {
+ case OMX_PortDomainAudio:
+ {
+ LOGI("eDomain = AUDIO");
+
+ if (port_index == kPortIndexOutput) {
+ OMX_AUDIO_PORTDEFINITIONTYPE *audio_def = &def.format.audio;
+ assert(audio_def->eEncoding == OMX_AUDIO_CodingPCM);
+
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ params.nSize = sizeof(params);
+ params.nVersion.s.nVersionMajor = 1;
+ params.nVersion.s.nVersionMinor = 1;
+ params.nPortIndex = port_index;
+
+ err = mOMX->get_parameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ assert(err == OK);
+
+ assert(params.nChannels == 1 || params.bInterleaved);
+ assert(params.eNumData == OMX_NumericalDataSigned);
+ assert(params.nBitPerSample == 16);
+ assert(params.ePCMMode == OMX_AUDIO_PCMModeLinear);
+
+ LOGI("nChannels = %ld, nSamplingRate = %ld",
+ params.nChannels, params.nSamplingRate);
+ }
+
+ break;
+ }
+
+ case OMX_PortDomainVideo:
+ {
+ LOGI("eDomain = VIDEO");
+
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+ LOGI("nFrameWidth = %ld, nFrameHeight = %ld, nStride = %ld, "
+ "nSliceHeight = %ld",
+ video_def->nFrameWidth, video_def->nFrameHeight,
+ video_def->nStride, video_def->nSliceHeight);
+ LOGI("nBitrate = %ld, xFrameRate = %.2f",
+ video_def->nBitrate, video_def->xFramerate / 65536.0f);
+ LOGI("eCompressionFormat = %d, eColorFormat = %d",
+ video_def->eCompressionFormat, video_def->eColorFormat);
+
+ break;
+ }
+
+ default:
+ LOGI("eDomain = UNKNOWN");
+ break;
+ }
+}
+
+void OMXDecoder::postStart() {
+ omx_message msg;
+ msg.type = omx_message::START;
+ postMessage(msg);
+}
+
+void OMXDecoder::postEmptyBufferDone(IOMX::buffer_id buffer) {
+ omx_message msg;
+ msg.type = omx_message::EMPTY_BUFFER_DONE;
+ msg.u.buffer_data.node = mNode;
+ msg.u.buffer_data.buffer = buffer;
+ postMessage(msg);
+}
+
+void OMXDecoder::postInitialFillBuffer(IOMX::buffer_id buffer) {
+ omx_message msg;
+ msg.type = omx_message::INITIAL_FILL_BUFFER;
+ msg.u.buffer_data.node = mNode;
+ msg.u.buffer_data.buffer = buffer;
+ postMessage(msg);
+}
+
+} // namespace android
diff --git a/media/libstagefright/QComHardwareRenderer.cpp b/media/libstagefright/QComHardwareRenderer.cpp
new file mode 100644
index 0000000..5a23792
--- /dev/null
+++ b/media/libstagefright/QComHardwareRenderer.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryHeapPmem.h>
+#include <media/stagefright/QComHardwareRenderer.h>
+#include <ui/ISurface.h>
+
+namespace android {
+
+////////////////////////////////////////////////////////////////////////////////
+
+typedef struct PLATFORM_PRIVATE_ENTRY
+{
+ /* Entry type */
+ uint32_t type;
+
+ /* Pointer to platform specific entry */
+ void *entry;
+
+} PLATFORM_PRIVATE_ENTRY;
+
+typedef struct PLATFORM_PRIVATE_LIST
+{
+ /* Number of entries */
+ uint32_t nEntries;
+
+ /* Pointer to array of platform specific entries *
+ * Contiguous block of PLATFORM_PRIVATE_ENTRY elements */
+ PLATFORM_PRIVATE_ENTRY *entryList;
+
+} PLATFORM_PRIVATE_LIST;
+
+// data structures for tunneling buffers
+typedef struct PLATFORM_PRIVATE_PMEM_INFO
+{
+ /* pmem file descriptor */
+ uint32_t pmem_fd;
+ uint32_t offset;
+
+} PLATFORM_PRIVATE_PMEM_INFO;
+
+#define PLATFORM_PRIVATE_PMEM 1
+
+QComHardwareRenderer::QComHardwareRenderer(
+ const sp<ISurface> &surface,
+ size_t displayWidth, size_t displayHeight,
+ size_t decodedWidth, size_t decodedHeight)
+ : mISurface(surface),
+ mDisplayWidth(displayWidth),
+ mDisplayHeight(displayHeight),
+ mDecodedWidth(decodedWidth),
+ mDecodedHeight(decodedHeight),
+ mFrameSize((mDecodedWidth * mDecodedHeight * 3) / 2) {
+ assert(mISurface.get() != NULL);
+ assert(mDecodedWidth > 0);
+ assert(mDecodedHeight > 0);
+}
+
+QComHardwareRenderer::~QComHardwareRenderer() {
+ mISurface->unregisterBuffers();
+}
+
+void QComHardwareRenderer::render(
+ const void *data, size_t size, void *platformPrivate) {
+ size_t offset;
+ if (!getOffset(platformPrivate, &offset)) {
+ return;
+ }
+
+ mISurface->postBuffer(offset);
+}
+
+bool QComHardwareRenderer::getOffset(void *platformPrivate, size_t *offset) {
+ *offset = 0;
+
+ PLATFORM_PRIVATE_LIST *list = (PLATFORM_PRIVATE_LIST *)platformPrivate;
+ for (uint32_t i = 0; i < list->nEntries; ++i) {
+ if (list->entryList[i].type != PLATFORM_PRIVATE_PMEM) {
+ continue;
+ }
+
+ PLATFORM_PRIVATE_PMEM_INFO *info =
+ (PLATFORM_PRIVATE_PMEM_INFO *)list->entryList[i].entry;
+
+ if (info != NULL) {
+ if (mMemoryHeap.get() == NULL) {
+ publishBuffers(info->pmem_fd);
+ }
+
+ if (mMemoryHeap.get() == NULL) {
+ return false;
+ }
+
+ *offset = info->offset;
+
+ return true;
+ }
+ }
+
+ return false;
+}
+
+void QComHardwareRenderer::publishBuffers(uint32_t pmem_fd) {
+ sp<MemoryHeapBase> master =
+ reinterpret_cast<MemoryHeapBase *>(pmem_fd);
+
+ master->setDevice("/dev/pmem");
+
+ mMemoryHeap = new MemoryHeapPmem(master, 0);
+ mMemoryHeap->slap();
+
+ ISurface::BufferHeap bufferHeap(
+ mDisplayWidth, mDisplayHeight,
+ mDecodedWidth, mDecodedHeight,
+ PIXEL_FORMAT_YCbCr_420_SP,
+ mMemoryHeap);
+
+ status_t err = mISurface->registerBuffers(bufferHeap);
+ assert(err == OK);
+}
+
+} // namespace android
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
new file mode 100644
index 0000000..8f1fa67
--- /dev/null
+++ b/media/libstagefright/SampleTable.cpp
@@ -0,0 +1,598 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SampleTable"
+#include <utils/Log.h>
+
+#include <arpa/inet.h>
+#include <assert.h>
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/SampleTable.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static const uint32_t kChunkOffsetType32 = FOURCC('s', 't', 'c', 'o');
+static const uint32_t kChunkOffsetType64 = FOURCC('c', 'o', '6', '4');
+static const uint32_t kSampleSizeType32 = FOURCC('s', 't', 's', 'z');
+static const uint32_t kSampleSizeTypeCompact = FOURCC('s', 't', 'z', '2');
+
// Builds an (initially empty) sample table over 'source'. Offsets start
// at -1 ("box not seen yet") and counts at 0; the individual
// set*Params() methods populate them as the corresponding MP4 boxes are
// parsed.
// NOTE(review): mDataSource is a raw pointer — ownership is not taken
// here, so the caller must keep 'source' alive for this table's
// lifetime; confirm against the extractor that constructs us.
SampleTable::SampleTable(DataSource *source)
    : mDataSource(source),
      mChunkOffsetOffset(-1),
      mChunkOffsetType(0),
      mNumChunkOffsets(0),
      mSampleToChunkOffset(-1),
      mNumSampleToChunkOffsets(0),
      mSampleSizeOffset(-1),
      mSampleSizeFieldSize(0),
      mDefaultSampleSize(0),
      mNumSampleSizes(0),
      mTimeToSampleCount(0),
      mTimeToSample(NULL),
      mSyncSampleOffset(-1),
      mNumSyncSamples(0) {
}
+
// Frees the decoded time-to-sample table; all other tables are read
// lazily from mDataSource and own no storage here.
SampleTable::~SampleTable() {
    delete[] mTimeToSample;
    mTimeToSample = NULL;
}
+
+status_t SampleTable::setChunkOffsetParams(
+ uint32_t type, off_t data_offset, off_t data_size) {
+ if (mChunkOffsetOffset >= 0) {
+ return ERROR_MALFORMED;
+ }
+
+ assert(type == kChunkOffsetType32 || type == kChunkOffsetType64);
+
+ mChunkOffsetOffset = data_offset;
+ mChunkOffsetType = type;
+
+ if (data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t header[8];
+ if (mDataSource->read_at(
+ data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+ return ERROR_IO;
+ }
+
+ if (U32_AT(header) != 0) {
+ // Expected version = 0, flags = 0.
+ return ERROR_MALFORMED;
+ }
+
+ mNumChunkOffsets = U32_AT(&header[4]);
+
+ if (mChunkOffsetType == kChunkOffsetType32) {
+ if (data_size < 8 + mNumChunkOffsets * 4) {
+ return ERROR_MALFORMED;
+ }
+ } else {
+ if (data_size < 8 + mNumChunkOffsets * 8) {
+ return ERROR_MALFORMED;
+ }
+ }
+
+ return OK;
+}
+
+status_t SampleTable::setSampleToChunkParams(
+ off_t data_offset, off_t data_size) {
+ if (mSampleToChunkOffset >= 0) {
+ return ERROR_MALFORMED;
+ }
+
+ mSampleToChunkOffset = data_offset;
+
+ if (data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t header[8];
+ if (mDataSource->read_at(
+ data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+ return ERROR_IO;
+ }
+
+ if (U32_AT(header) != 0) {
+ // Expected version = 0, flags = 0.
+ return ERROR_MALFORMED;
+ }
+
+ mNumSampleToChunkOffsets = U32_AT(&header[4]);
+
+ if (data_size < 8 + mNumSampleToChunkOffsets * 12) {
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+}
+
+status_t SampleTable::setSampleSizeParams(
+ uint32_t type, off_t data_offset, off_t data_size) {
+ if (mSampleSizeOffset >= 0) {
+ return ERROR_MALFORMED;
+ }
+
+ assert(type == kSampleSizeType32 || type == kSampleSizeTypeCompact);
+
+ mSampleSizeOffset = data_offset;
+
+ if (data_size < 12) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t header[12];
+ if (mDataSource->read_at(
+ data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+ return ERROR_IO;
+ }
+
+ if (U32_AT(header) != 0) {
+ // Expected version = 0, flags = 0.
+ return ERROR_MALFORMED;
+ }
+
+ mDefaultSampleSize = U32_AT(&header[4]);
+ mNumSampleSizes = U32_AT(&header[8]);
+
+ if (type == kSampleSizeType32) {
+ mSampleSizeFieldSize = 32;
+
+ if (mDefaultSampleSize != 0) {
+ return OK;
+ }
+
+ if (data_size < 12 + mNumSampleSizes * 4) {
+ return ERROR_MALFORMED;
+ }
+ } else {
+ if ((mDefaultSampleSize & 0xffffff00) != 0) {
+ // The high 24 bits are reserved and must be 0.
+ return ERROR_MALFORMED;
+ }
+
+ mSampleSizeFieldSize = mDefaultSampleSize & 0xf;
+ mDefaultSampleSize = 0;
+
+ if (mSampleSizeFieldSize != 4 && mSampleSizeFieldSize != 8
+ && mSampleSizeFieldSize != 16) {
+ return ERROR_MALFORMED;
+ }
+
+ if (data_size < 12 + (mNumSampleSizes * mSampleSizeFieldSize + 4) / 8) {
+ return ERROR_MALFORMED;
+ }
+ }
+
+ return OK;
+}
+
+status_t SampleTable::setTimeToSampleParams(
+ off_t data_offset, off_t data_size) {
+ if (mTimeToSample != NULL || data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t header[8];
+ if (mDataSource->read_at(
+ data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+ return ERROR_IO;
+ }
+
+ if (U32_AT(header) != 0) {
+ // Expected version = 0, flags = 0.
+ return ERROR_MALFORMED;
+ }
+
+ mTimeToSampleCount = U32_AT(&header[4]);
+ mTimeToSample = new uint32_t[mTimeToSampleCount * 2];
+
+ size_t size = sizeof(uint32_t) * mTimeToSampleCount * 2;
+ if (mDataSource->read_at(
+ data_offset + 8, mTimeToSample, size) < (ssize_t)size) {
+ return ERROR_IO;
+ }
+
+ for (uint32_t i = 0; i < mTimeToSampleCount * 2; ++i) {
+ mTimeToSample[i] = ntohl(mTimeToSample[i]);
+ }
+
+ return OK;
+}
+
+status_t SampleTable::setSyncSampleParams(off_t data_offset, off_t data_size) {
+ if (mSyncSampleOffset >= 0 || data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ mSyncSampleOffset = data_offset;
+
+ uint8_t header[8];
+ if (mDataSource->read_at(
+ data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+ return ERROR_IO;
+ }
+
+ if (U32_AT(header) != 0) {
+ // Expected version = 0, flags = 0.
+ return ERROR_MALFORMED;
+ }
+
+ mNumSyncSamples = U32_AT(&header[4]);
+
+ if (mNumSyncSamples < 2) {
+ LOGW("Table of sync samples is empty or has only a single entry!");
+ }
+ return OK;
+}
+
// Number of entries in the chunk offset table ('stco'/'co64').
uint32_t SampleTable::countChunkOffsets() const {
    return mNumChunkOffsets;
}
+
+status_t SampleTable::getChunkOffset(uint32_t chunk_index, off_t *offset) {
+ *offset = 0;
+
+ if (mChunkOffsetOffset < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (chunk_index >= mNumChunkOffsets) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ if (mChunkOffsetType == kChunkOffsetType32) {
+ uint32_t offset32;
+
+ if (mDataSource->read_at(
+ mChunkOffsetOffset + 8 + 4 * chunk_index,
+ &offset32,
+ sizeof(offset32)) < (ssize_t)sizeof(offset32)) {
+ return ERROR_IO;
+ }
+
+ *offset = ntohl(offset32);
+ } else {
+ assert(mChunkOffsetOffset == kChunkOffsetType64);
+
+ uint64_t offset64;
+ if (mDataSource->read_at(
+ mChunkOffsetOffset + 8 + 8 * chunk_index,
+ &offset64,
+ sizeof(offset64)) < (ssize_t)sizeof(offset64)) {
+ return ERROR_IO;
+ }
+
+ *offset = ntoh64(offset64);
+ }
+
+ return OK;
+}
+
+status_t SampleTable::getChunkForSample(
+ uint32_t sample_index,
+ uint32_t *chunk_index,
+ uint32_t *chunk_relative_sample_index,
+ uint32_t *desc_index) {
+ *chunk_index = 0;
+ *chunk_relative_sample_index = 0;
+ *desc_index = 0;
+
+ if (mSampleToChunkOffset < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (sample_index >= countSamples()) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ uint32_t first_chunk = 0;
+ uint32_t samples_per_chunk = 0;
+ uint32_t chunk_desc_index = 0;
+
+ uint32_t index = 0;
+ while (index < mNumSampleToChunkOffsets) {
+ uint8_t buffer[12];
+ if (mDataSource->read_at(mSampleToChunkOffset + 8 + index * 12,
+ buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) {
+ return ERROR_IO;
+ }
+
+ uint32_t stop_chunk = U32_AT(buffer);
+ if (sample_index < (stop_chunk - first_chunk) * samples_per_chunk) {
+ break;
+ }
+
+ sample_index -= (stop_chunk - first_chunk) * samples_per_chunk;
+ first_chunk = stop_chunk;
+ samples_per_chunk = U32_AT(&buffer[4]);
+ chunk_desc_index = U32_AT(&buffer[8]);
+
+ ++index;
+ }
+
+ *chunk_index = sample_index / samples_per_chunk + first_chunk - 1;
+ *chunk_relative_sample_index = sample_index % samples_per_chunk;
+ *desc_index = chunk_desc_index;
+
+ return OK;
+}
+
// Total number of samples, as declared by the 'stsz'/'stz2' box.
uint32_t SampleTable::countSamples() const {
    return mNumSampleSizes;
}
+
+status_t SampleTable::getSampleSize(
+ uint32_t sample_index, size_t *sample_size) {
+ *sample_size = 0;
+
+ if (mSampleSizeOffset < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (sample_index >= mNumSampleSizes) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ if (mDefaultSampleSize > 0) {
+ *sample_size = mDefaultSampleSize;
+ return OK;
+ }
+
+ switch (mSampleSizeFieldSize) {
+ case 32:
+ {
+ if (mDataSource->read_at(
+ mSampleSizeOffset + 12 + 4 * sample_index,
+ sample_size, sizeof(*sample_size)) < (ssize_t)sizeof(*sample_size)) {
+ return ERROR_IO;
+ }
+
+ *sample_size = ntohl(*sample_size);
+ break;
+ }
+
+ case 16:
+ {
+ uint16_t x;
+ if (mDataSource->read_at(
+ mSampleSizeOffset + 12 + 2 * sample_index,
+ &x, sizeof(x)) < (ssize_t)sizeof(x)) {
+ return ERROR_IO;
+ }
+
+ *sample_size = ntohs(x);
+ break;
+ }
+
+ case 8:
+ {
+ uint8_t x;
+ if (mDataSource->read_at(
+ mSampleSizeOffset + 12 + sample_index,
+ &x, sizeof(x)) < (ssize_t)sizeof(x)) {
+ return ERROR_IO;
+ }
+
+ *sample_size = x;
+ break;
+ }
+
+ default:
+ {
+ assert(mSampleSizeFieldSize == 4);
+
+ uint8_t x;
+ if (mDataSource->read_at(
+ mSampleSizeOffset + 12 + sample_index / 2,
+ &x, sizeof(x)) < (ssize_t)sizeof(x)) {
+ return ERROR_IO;
+ }
+
+ *sample_size = (sample_index & 1) ? x & 0x0f : x >> 4;
+ break;
+ }
+ }
+
+ return OK;
+}
+
// Computes the absolute file position and size of one sample: locate its
// chunk, take the chunk's base offset, then add up the sizes of the
// samples that precede it within that chunk. Costs O(samples-per-chunk)
// table reads per call.
status_t SampleTable::getSampleOffsetAndSize(
        uint32_t sample_index, off_t *offset, size_t *size) {
    Mutex::Autolock autoLock(mLock);

    *offset = 0;
    *size = 0;

    uint32_t chunk_index;
    uint32_t chunk_relative_sample_index;
    uint32_t desc_index;
    status_t err = getChunkForSample(
        sample_index, &chunk_index, &chunk_relative_sample_index,
        &desc_index);

    if (err != OK) {
        return err;
    }

    err = getChunkOffset(chunk_index, offset);

    if (err != OK) {
        return err;
    }

    // Walk the preceding samples in this chunk, summing their sizes to
    // find our sample's offset within the chunk.
    for (uint32_t j = 0; j < chunk_relative_sample_index; ++j) {
        size_t sample_size;
        err = getSampleSize(sample_index - j - 1, &sample_size);

        if (err != OK) {
            return err;
        }

        *offset += sample_size;
    }

    err = getSampleSize(sample_index, size);

    if (err != OK) {
        return err;
    }

    return OK;
}
+
+status_t SampleTable::getMaxSampleSize(size_t *max_size) {
+ Mutex::Autolock autoLock(mLock);
+
+ *max_size = 0;
+
+ for (uint32_t i = 0; i < mNumSampleSizes; ++i) {
+ size_t sample_size;
+ status_t err = getSampleSize(i, &sample_size);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (sample_size > *max_size) {
+ *max_size = sample_size;
+ }
+ }
+
+ return OK;
+}
+
// Computes the decode timestamp (in media timescale ticks) of a sample
// by accumulating 'stts' run-length entries until the run containing
// sample_index is reached.
// XXX FIXME idiotic (for the common use-case) O(n) algorithm below...
status_t SampleTable::getDecodingTime(uint32_t sample_index, uint32_t *time) {
    Mutex::Autolock autoLock(mLock);

    if (sample_index >= mNumSampleSizes) {
        return ERROR_OUT_OF_RANGE;
    }

    uint32_t cur_sample = 0;
    *time = 0;
    for (uint32_t i = 0; i < mTimeToSampleCount; ++i) {
        // Entry i describes 'n' consecutive samples of 'delta' ticks each.
        uint32_t n = mTimeToSample[2 * i];
        uint32_t delta = mTimeToSample[2 * i + 1];

        if (sample_index < cur_sample + n) {
            // Target sample lies inside this run.
            *time += delta * (sample_index - cur_sample);

            return OK;
        }

        *time += delta * n;
        cur_sample += n;
    }

    // The table covered fewer samples than sample_index.
    return ERROR_OUT_OF_RANGE;
}
+
+status_t SampleTable::findClosestSample(
+ uint32_t req_time, uint32_t *sample_index, uint32_t flags) {
+ Mutex::Autolock autoLock(mLock);
+
+ uint32_t cur_sample = 0;
+ uint32_t time = 0;
+ for (uint32_t i = 0; i < mTimeToSampleCount; ++i) {
+ uint32_t n = mTimeToSample[2 * i];
+ uint32_t delta = mTimeToSample[2 * i + 1];
+
+ if (req_time < time + n * delta) {
+ int j = (req_time - time) / delta;
+
+ *sample_index = cur_sample + j;
+
+ if (flags & kSyncSample_Flag) {
+ return findClosestSyncSample(*sample_index, sample_index);
+ }
+
+ return OK;
+ }
+
+ time += delta * n;
+ cur_sample += n;
+ }
+
+ return ERROR_OUT_OF_RANGE;
+}
+
// Returns (in *sample_index) a sync sample at or after
// start_sample_index, via binary search over the 'stss' table. Sample
// numbers stored in the table are 1-based, big-endian.
status_t SampleTable::findClosestSyncSample(
        uint32_t start_sample_index, uint32_t *sample_index) {
    *sample_index = 0;

    if (mSyncSampleOffset < 0) {
        // All samples are sync-samples.
        *sample_index = start_sample_index;
        return OK;
    }

    uint32_t x;
    uint32_t left = 0;
    uint32_t right = mNumSyncSamples;
    while (left < right) {
        uint32_t mid = (left + right) / 2;
        // NOTE(review): "(mid - 1)" reads entry mid-1; when mid == 0 this
        // reads the 4 bytes immediately BEFORE the table (the count
        // field). The search appears to treat indices as 1-based —
        // confirm this is intended and that left == 0 cannot reach the
        // read below.
        if (mDataSource->read_at(
                mSyncSampleOffset + 8 + (mid - 1) * 4, &x, 4) != 4) {
            return ERROR_IO;
        }

        x = ntohl(x);

        if (x < (start_sample_index + 1)) {
            left = mid + 1;
        } else if (x > (start_sample_index + 1)) {
            right = mid;
        } else {
            break;
        }
    }

#if 1
    // Make sure we return a sample at or _after_ the requested one.
    // Seeking to a particular time in a media source containing audio and
    // video will most likely be able to sync fairly close to the requested
    // time in the audio track but may only be able to seek a fair distance
    // from the requested time in the video track.
    // If we seek the video track to a time earlier than the audio track,
    // we'll cause the video track to be late for quite a while, the decoder
    // trying to catch up.
    // If we seek the video track to a time later than the audio track,
    // audio will start playing fine while no video will be output for a
    // while, the video decoder will not stress the system.
    // NOTE(review): same 1-based hazard — "(left - 1)" reads before the
    // table when the loop exits with left == 0; and the assert below
    // fires (debug builds) when no sync sample exists at/after the
    // request. Both deserve explicit guards.
    if (mDataSource->read_at(
            mSyncSampleOffset + 8 + (left - 1) * 4, &x, 4) != 4) {
        return ERROR_IO;
    }
    x = ntohl(x);
    assert((x - 1) >= start_sample_index);
#endif

    // Convert the 1-based table entry back to a 0-based sample index.
    *sample_index = x - 1;

    return OK;
}
+
+} // namespace android
+
diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp
new file mode 100644
index 0000000..17b626e
--- /dev/null
+++ b/media/libstagefright/ShoutcastSource.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdlib.h>
+
+#include <media/stagefright/HTTPStream.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/ShoutcastSource.h>
+#include <media/stagefright/string.h>
+
+namespace android {
+
// Takes ownership of 'http' (released in the destructor). If the server
// sent an "icy-metaint" response header, the audio stream is interleaved
// with a metadata block every mMetaDataOffset bytes; otherwise
// mMetaDataOffset stays 0 and no metadata handling happens in read().
ShoutcastSource::ShoutcastSource(HTTPStream *http)
    : mHttp(http),
      mMetaDataOffset(0),
      mBytesUntilMetaData(0),
      mGroup(NULL),
      mStarted(false) {
    string metaint;
    if (mHttp->find_header_value("icy-metaint", &metaint)) {
        char *end;
        const char *start = metaint.c_str();
        mMetaDataOffset = strtol(start, &end, 10);
        // The header value must be a complete, positive decimal number.
        assert(end > start && *end == '\0');
        assert(mMetaDataOffset > 0);

        mBytesUntilMetaData = mMetaDataOffset;
    }
}
+
// Stops streaming if still active, then releases the owned HTTP stream.
ShoutcastSource::~ShoutcastSource() {
    if (mStarted) {
        stop();
    }

    delete mHttp;
    mHttp = NULL;
}
+
// Begins streaming: allocates the single-buffer pool that read() draws
// from. The MetaData parameter is unused.
status_t ShoutcastSource::start(MetaData *) {
    assert(!mStarted);

    mGroup = new MediaBufferGroup;
    mGroup->add_buffer(new MediaBuffer(4096)); // XXX

    mStarted = true;

    return OK;
}
+
// Ends streaming and frees the buffer pool (all buffers must have been
// returned by then).
status_t ShoutcastSource::stop() {
    assert(mStarted);

    delete mGroup;
    mGroup = NULL;

    mStarted = false;

    return OK;
}
+
+sp<MetaData> ShoutcastSource::getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setCString(kKeyMIMEType, "audio/mpeg");
+ meta->setInt32(kKeySampleRate, 44100);
+ meta->setInt32(kKeyChannelCount, 2); // XXX
+
+ return meta;
+}
+
+status_t ShoutcastSource::read(
+ MediaBuffer **out, const ReadOptions *options) {
+ assert(mStarted);
+
+ *out = NULL;
+
+ int64_t seekTimeUs;
+ if (options && options->getSeekTo(&seekTimeUs)) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ MediaBuffer *buffer;
+ status_t err = mGroup->acquire_buffer(&buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ *out = buffer;
+
+ size_t num_bytes = buffer->size();
+ if (mMetaDataOffset > 0 && num_bytes > mBytesUntilMetaData) {
+ num_bytes = mBytesUntilMetaData;
+ }
+
+ ssize_t n = mHttp->receive(buffer->data(), num_bytes);
+
+ if (n <= 0) {
+ return (status_t)n;
+ }
+
+ buffer->set_range(0, n);
+
+ mBytesUntilMetaData -= (size_t)n;
+
+ if (mBytesUntilMetaData == 0) {
+ unsigned char num_16_byte_blocks = 0;
+ n = mHttp->receive((char *)&num_16_byte_blocks, 1);
+ assert(n == 1);
+
+ char meta[255 * 16];
+ size_t meta_size = num_16_byte_blocks * 16;
+ size_t meta_length = 0;
+ while (meta_length < meta_size) {
+ n = mHttp->receive(&meta[meta_length], meta_size - meta_length);
+ if (n <= 0) {
+ return (status_t)n;
+ }
+
+ meta_length += (size_t) n;
+ }
+
+ while (meta_length > 0 && meta[meta_length - 1] == '\0') {
+ --meta_length;
+ }
+
+ if (meta_length > 0) {
+ // Technically we should probably attach this meta data to the
+ // next buffer. XXX
+ buffer->meta_data()->setData('shou', 'shou', meta, meta_length);
+ }
+
+ mBytesUntilMetaData = mMetaDataOffset;
+ }
+
+ return OK;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/SoftwareRenderer.cpp b/media/libstagefright/SoftwareRenderer.cpp
new file mode 100644
index 0000000..66b6b07
--- /dev/null
+++ b/media/libstagefright/SoftwareRenderer.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SoftwareRenderer"
+#include <utils/Log.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/SoftwareRenderer.h>
+#include <ui/ISurface.h>
+
+namespace android {
+
+#define QCOM_YUV 0
+
// Allocates a double-buffered RGB565 heap (2 bytes/pixel, two frames)
// and registers it with the surface; the YUV-to-RGB conversion happens
// on the CPU in render().
SoftwareRenderer::SoftwareRenderer(
        const sp<ISurface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight)
    : mISurface(surface),
      mDisplayWidth(displayWidth),
      mDisplayHeight(displayHeight),
      mDecodedWidth(decodedWidth),
      mDecodedHeight(decodedHeight),
      mFrameSize(mDecodedWidth * mDecodedHeight * 2),  // RGB565
      mMemoryHeap(new MemoryHeapBase(2 * mFrameSize)),
      mIndex(0) {
    assert(mISurface.get() != NULL);
    assert(mDecodedWidth > 0);
    assert(mDecodedHeight > 0);
    // heapID is the underlying fd; negative means allocation failed.
    assert(mMemoryHeap->heapID() >= 0);

    ISurface::BufferHeap bufferHeap(
        mDisplayWidth, mDisplayHeight,
        mDecodedWidth, mDecodedHeight,
        PIXEL_FORMAT_RGB_565,
        mMemoryHeap);

    status_t err = mISurface->registerBuffers(bufferHeap);
    assert(err == OK);
}
+
// Gives the RGB565 buffers back to the surface.
SoftwareRenderer::~SoftwareRenderer() {
    mISurface->unregisterBuffers();
}
+
// Converts one decoded 4:2:0 frame to RGB565 into the back half of the
// double-buffered heap and posts it. Fixed-point BT.601 math, two pixels
// per iteration (a horizontal pair shares one chroma sample). With
// QCOM_YUV set the chroma is interleaved CbCr after the luma plane;
// otherwise separate U and V planes are assumed.
// NOTE(review): the lazy kClip initialization below is unguarded — racy
// if two renderers hit their first frame concurrently; confirm rendering
// is single-threaded.
void SoftwareRenderer::render(
        const void *data, size_t size, void *platformPrivate) {
    assert(size >= (mDecodedWidth * mDecodedHeight * 3) / 2);

    // Saturation table mapping the conversion's full output range
    // (-278..535, see derivation below) onto 0..255.
    static const signed kClipMin = -278;
    static const signed kClipMax = 535;
    static uint8_t kClip[kClipMax - kClipMin + 1];
    static uint8_t *kAdjustedClip = &kClip[-kClipMin];

    static bool clipInitialized = false;

    if (!clipInitialized) {
        for (signed i = kClipMin; i <= kClipMax; ++i) {
            kClip[i - kClipMin] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
        }
        clipInitialized = true;
    }

    // Alternate between the two frame slots in the heap.
    size_t offset = mIndex * mFrameSize;

    void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;

    uint32_t *dst_ptr = (uint32_t *)dst;

    const uint8_t *src_y = (const uint8_t *)data;

    const uint8_t *src_u =
        (const uint8_t *)src_y + mDecodedWidth * mDecodedHeight;

#if !QCOM_YUV
    const uint8_t *src_v =
        (const uint8_t *)src_u + (mDecodedWidth / 2) * (mDecodedHeight / 2);
#endif

    for (size_t y = 0; y < mDecodedHeight; ++y) {
        for (size_t x = 0; x < mDecodedWidth; x += 2) {
            // B = 1.164 * (Y - 16) + 2.018 * (U - 128)
            // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
            // R = 1.164 * (Y - 16) + 1.596 * (V - 128)

            // B = 298/256 * (Y - 16) + 517/256 * (U - 128)
            // G = .................. - 208/256 * (V - 128) - 100/256 * (U - 128)
            // R = .................. + 409/256 * (V - 128)

            // min_B = (298 * (- 16) + 517 * (- 128)) / 256 = -277
            // min_G = (298 * (- 16) - 208 * (255 - 128) - 100 * (255 - 128)) / 256 = -172
            // min_R = (298 * (- 16) + 409 * (- 128)) / 256 = -223

            // max_B = (298 * (255 - 16) + 517 * (255 - 128)) / 256 = 534
            // max_G = (298 * (255 - 16) - 208 * (- 128) - 100 * (- 128)) / 256 = 432
            // max_R = (298 * (255 - 16) + 409 * (255 - 128)) / 256 = 481

            // clip range -278 .. 535

            signed y1 = (signed)src_y[x] - 16;
            signed y2 = (signed)src_y[x + 1] - 16;

#if QCOM_YUV
            signed u = (signed)src_u[x & ~1] - 128;
            signed v = (signed)src_u[(x & ~1) + 1] - 128;
#else
            signed u = (signed)src_u[x / 2] - 128;
            signed v = (signed)src_v[x / 2] - 128;
#endif

            // Chroma contributions are shared by both pixels of the pair.
            signed u_b = u * 517;
            signed u_g = -u * 100;
            signed v_g = -v * 208;
            signed v_r = v * 409;

            signed tmp1 = y1 * 298;
            signed b1 = (tmp1 + u_b) / 256;
            signed g1 = (tmp1 + v_g + u_g) / 256;
            signed r1 = (tmp1 + v_r) / 256;

            signed tmp2 = y2 * 298;
            signed b2 = (tmp2 + u_b) / 256;
            signed g2 = (tmp2 + v_g + u_g) / 256;
            signed r2 = (tmp2 + v_r) / 256;

            // Pack each pixel as 5-6-5 after saturation.
            uint32_t rgb1 =
                ((kAdjustedClip[r1] >> 3) << 11)
                | ((kAdjustedClip[g1] >> 2) << 5)
                | (kAdjustedClip[b1] >> 3);

            uint32_t rgb2 =
                ((kAdjustedClip[r2] >> 3) << 11)
                | ((kAdjustedClip[g2] >> 2) << 5)
                | (kAdjustedClip[b2] >> 3);

            // Store both 16-bit pixels with a single 32-bit write.
            dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
        }

        src_y += mDecodedWidth;

        // Chroma rows advance only every other luma row (4:2:0).
        if (y & 1) {
#if QCOM_YUV
            src_u += mDecodedWidth;
#else
            src_u += mDecodedWidth / 2;
            src_v += mDecodedWidth / 2;
#endif
        }

        dst_ptr += mDecodedWidth / 2;
    }

    mISurface->postBuffer(offset);
    mIndex = 1 - mIndex;
}
+
+} // namespace android
diff --git a/media/libstagefright/SurfaceRenderer.cpp b/media/libstagefright/SurfaceRenderer.cpp
new file mode 100644
index 0000000..e54288d
--- /dev/null
+++ b/media/libstagefright/SurfaceRenderer.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SurfaceRenderer"
+#include <utils/Log.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <media/stagefright/SurfaceRenderer.h>
+#include <ui/Surface.h>
+
+namespace android {
+
// Renderer that copies decoded frames directly into a Surface that it
// locks per frame; no buffer registration needed.
SurfaceRenderer::SurfaceRenderer(
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight)
    : mSurface(surface),
      mDisplayWidth(displayWidth),
      mDisplayHeight(displayHeight),
      mDecodedWidth(decodedWidth),
      mDecodedHeight(decodedHeight) {
}
+
// Nothing to release: the surface is reference-counted via sp<>.
SurfaceRenderer::~SurfaceRenderer() {
}
+
// Copies one decoded frame into the locked surface, cropping each row
// from the decoder's (padded) width down to the display width. 'size'
// and 'platformPrivate' are unused by this renderer.
// NOTE(review): the second loop copies height/2 rows of (even) width
// bytes — consistent with 4:2:0 chroma stored interleaved (CbCr) after
// the luma plane; confirm the decoder's actual output layout.
void SurfaceRenderer::render(
        const void *data, size_t size, void *platformPrivate) {
    Surface::SurfaceInfo info;
    status_t err = mSurface->lock(&info);
    if (err != OK) {
        // Surface unavailable; drop the frame.
        return;
    }

    const uint8_t *src = (const uint8_t *)data;
    uint8_t *dst = (uint8_t *)info.bits;

    // Luma plane: one byte per pixel, row by row.
    for (size_t i = 0; i < mDisplayHeight; ++i) {
        memcpy(dst, src, mDisplayWidth);
        src += mDecodedWidth;
        dst += mDisplayWidth;
    }
    // Skip the decoder's padding rows below the visible luma.
    src += (mDecodedHeight - mDisplayHeight) * mDecodedWidth;

    // Chroma: half as many rows, widths rounded up to even.
    for (size_t i = 0; i < (mDisplayHeight + 1) / 2; ++i) {
        memcpy(dst, src, (mDisplayWidth + 1) & ~1);
        src += (mDecodedWidth + 1) & ~1;
        dst += (mDisplayWidth + 1) & ~1;
    }

    mSurface->unlockAndPost();
}
+
+} // namespace android
diff --git a/media/libstagefright/TimeSource.cpp b/media/libstagefright/TimeSource.cpp
new file mode 100644
index 0000000..d987fbf
--- /dev/null
+++ b/media/libstagefright/TimeSource.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stddef.h>
+#include <sys/time.h>
+
+#include <media/stagefright/TimeSource.h>
+
+namespace android {
+
// Captures "now" so getRealTimeUs() reports time relative to creation.
SystemTimeSource::SystemTimeSource()
    : mStartTimeUs(GetSystemTimeUs()) {
}
+
// Microseconds of wall-clock time elapsed since this source was created.
int64_t SystemTimeSource::getRealTimeUs() {
    return GetSystemTimeUs() - mStartTimeUs;
}
+
+// static
+int64_t SystemTimeSource::GetSystemTimeUs() {
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ return (int64_t)tv.tv_sec * 1000000 + tv.tv_usec;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp
new file mode 100644
index 0000000..2f8a19f
--- /dev/null
+++ b/media/libstagefright/TimedEventQueue.cpp
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef __STRICT_ANSI__
+#define __STDINT_LIMITS
+#define __STDC_LIMIT_MACROS
+#include <stdint.h>
+
+#define LOG_TAG "TimedEventQueue"
+#include <utils/Log.h>
+
+#include <sys/time.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include <media/stagefright/TimedEventQueue.h>
+
+namespace android {
+
// Queue starts idle; call start() to spin up the worker thread.
TimedEventQueue::TimedEventQueue()
    : mRunning(false),
      mStopped(false) {
}
+
// Stops (and joins) the worker thread if it is still running.
TimedEventQueue::~TimedEventQueue() {
    stop();
}
+
// Spins up the worker thread; no-op if already running.
// NOTE(review): mRunning is set only after pthread_create returns, so
// concurrent start()/stop() calls from different threads would race this
// flag — confirm the queue is driven from a single control thread.
void TimedEventQueue::start() {
    if (mRunning) {
        return;
    }

    mStopped = false;

    // Explicitly joinable so stop() can pthread_join the worker.
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

    pthread_create(&mThread, &attr, ThreadWrapper, this);

    pthread_attr_destroy(&attr);

    mRunning = true;
}
+
// Shuts the worker thread down and joins it. With flush == true all
// pending events fire first (the StopEvent goes to the back of the
// queue); otherwise the StopEvent jumps the queue via the reserved
// INT64_MIN timeslot.
// NOTE(review): mStopped is presumably set by StopEvent::fire, which is
// defined outside this view — confirm.
void TimedEventQueue::stop(bool flush) {
    if (!mRunning) {
        return;
    }

    if (flush) {
        postEventToBack(new StopEvent);
    } else {
        postTimedEvent(new StopEvent, INT64_MIN);
    }

    void *dummy;
    pthread_join(mThread, &dummy);

    // Drop any events that never got to fire.
    mQueue.clear();

    mRunning = false;
}
+
// Posts an event to the front of the queue.
void TimedEventQueue::postEvent(const sp<Event> &event) {
    // Use INT64_MIN + 1: the INT64_MIN timeslot itself is reserved so a
    // StopEvent can always be posted to the absolute head of the queue.
    postTimedEvent(event, INT64_MIN + 1);
}
+
// Appends an event after everything currently queued (INT64_MAX slot;
// the worker treats it as "fire immediately once it becomes the head").
void TimedEventQueue::postEventToBack(const sp<Event> &event) {
    postTimedEvent(event, INT64_MAX);
}
+
// Schedules 'event' to fire delay_us microseconds from now (wall clock).
void TimedEventQueue::postEventWithDelay(
        const sp<Event> &event, int64_t delay_us) {
    assert(delay_us >= 0);
    postTimedEvent(event, getRealTimeUs() + delay_us);
}
+
// Inserts 'event' in deadline order (stable: equal deadlines keep FIFO
// order). Wakes the worker if the head of the queue changed, and always
// signals that the queue is non-empty.
void TimedEventQueue::postTimedEvent(
        const sp<Event> &event, int64_t realtime_us) {
    Mutex::Autolock autoLock(mLock);

    // Find the first entry strictly later than realtime_us.
    List<QueueItem>::iterator it = mQueue.begin();
    while (it != mQueue.end() && realtime_us >= (*it).realtime_us) {
        ++it;
    }

    QueueItem item;
    item.event = event;
    item.realtime_us = realtime_us;

    // Inserting at the head shortens the worker's deadline; wake it so
    // it re-evaluates its timed wait.
    if (it == mQueue.begin()) {
        mQueueHeadChangedCondition.signal();
    }

    mQueue.insert(it, item);

    mQueueNotEmptyCondition.signal();
}
+
+bool TimedEventQueue::cancelEvent(const sp<Event> &event) {
+ Mutex::Autolock autoLock(mLock);
+
+ List<QueueItem>::iterator it = mQueue.begin();
+ while (it != mQueue.end() && (*it).event != event) {
+ ++it;
+ }
+
+ if (it == mQueue.end()) {
+ return false;
+ }
+
+ if (it == mQueue.begin()) {
+ mQueueHeadChangedCondition.signal();
+ }
+
+ mQueue.erase(it);
+
+ return true;
+}
+
// static
// Wall-clock time in microseconds; the epoch shared by all queue
// timeslots.
int64_t TimedEventQueue::getRealTimeUs() {
    struct timeval tv;
    gettimeofday(&tv, NULL);

    return (int64_t)tv.tv_sec * 1000000 + tv.tv_usec;
}
+
+// static
+void *TimedEventQueue::ThreadWrapper(void *me) {
+ static_cast<TimedEventQueue *>(me)->threadEntry();
+
+ return NULL;
+}
+
// Worker loop: repeatedly pops the earliest-deadline event and fires it.
void TimedEventQueue::threadEntry() {
    for (;;) {
        int64_t now_us;
        sp<Event> event;

        {
            Mutex::Autolock autoLock(mLock);

            // mStopped is set once a StopEvent has fired; exit before
            // waiting again.
            if (mStopped) {
                break;
            }

            while (mQueue.empty()) {
                mQueueNotEmptyCondition.wait(mLock);
            }

            List<QueueItem>::iterator it;
            for (;;) {
                it = mQueue.begin();

                now_us = getRealTimeUs();
                int64_t when_us = (*it).realtime_us;

                int64_t delay_us;
                // Negative slots (front posts) and INT64_MAX (back
                // posts) are sentinels: fire immediately.
                if (when_us < 0 || when_us == INT64_MAX) {
                    delay_us = 0;
                } else {
                    delay_us = when_us - now_us;
                }

                if (delay_us <= 0) {
                    break;
                }

                // Sleep until the head is due, or until a new head is
                // posted (head-changed signal) — then re-evaluate.
                status_t err = mQueueHeadChangedCondition.waitRelative(
                    mLock, delay_us * 1000);

                if (err == -ETIMEDOUT) {
                    now_us = getRealTimeUs();
                    break;
                }
            }

            event = (*it).event;
            mQueue.erase(it);
        }

        // Fire event with the lock NOT held.
        event->fire(this, now_us);
    }
}
+
+} // namespace android
+
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
new file mode 100644
index 0000000..2720f93
--- /dev/null
+++ b/media/libstagefright/Utils.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arpa/inet.h>
+
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
// Big-endian (network order) load of two bytes.
uint16_t U16_AT(const uint8_t *ptr) {
    uint16_t hi = ptr[0];
    uint16_t lo = ptr[1];

    return (uint16_t)((hi << 8) | lo);
}
+
// Big-endian (network order) load of four bytes.
// Fix: assemble in an unsigned type — the original shifted ptr[0]
// (promoted to signed int) left by 24, which is undefined behavior
// whenever the top bit of ptr[0] is set.
uint32_t U32_AT(const uint8_t *ptr) {
    return ((uint32_t)ptr[0] << 24)
        | ((uint32_t)ptr[1] << 16)
        | ((uint32_t)ptr[2] << 8)
        | (uint32_t)ptr[3];
}
+
// Big-endian (network order) load of eight bytes.
uint64_t U64_AT(const uint8_t *ptr) {
    uint64_t value = 0;
    for (size_t i = 0; i < 8; ++i) {
        value = (value << 8) | ptr[i];
    }

    return value;
}
+
// XXX warning: these won't work on big-endian host.
// Converts a 64-bit value from network to host order by byte-swapping
// each 32-bit half with ntohl and exchanging the halves.
uint64_t ntoh64(uint64_t x) {
    uint64_t lo = ntohl((uint32_t)(x & 0xffffffff));
    uint64_t hi = ntohl((uint32_t)(x >> 32));

    return (lo << 32) | hi;
}
+
// Mirror of ntoh64: host to network order for 64-bit values (same
// big-endian-host caveat applies).
uint64_t hton64(uint64_t x) {
    uint64_t lo = htonl((uint32_t)(x & 0xffffffff));
    uint64_t hi = htonl((uint32_t)(x >> 32));

    return (lo << 32) | hi;
}
+
+} // namespace android
+
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
new file mode 100644
index 0000000..9c6d475
--- /dev/null
+++ b/media/libstagefright/omx/Android.mk
@@ -0,0 +1,27 @@
+ifeq ($(BUILD_WITH_STAGEFRIGHT),true)
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+# Set up the OpenCore variables.
+include external/opencore/Config.mk
+LOCAL_C_INCLUDES := $(PV_INCLUDES)
+LOCAL_CFLAGS := $(PV_CFLAGS_MINUS_VISIBILITY)
+
+LOCAL_SRC_FILES:= \
+ OMX.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libmedia \
+ libutils \
+ libui \
+ libopencore_common
+
+LOCAL_PRELINK_MODULE:= false
+
+LOCAL_MODULE:= libstagefright_omx
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
new file mode 100644
index 0000000..daaa741
--- /dev/null
+++ b/media/libstagefright/omx/OMX.cpp
@@ -0,0 +1,623 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMX"
+#include <utils/Log.h>
+
+#include <sys/socket.h>
+
+#undef NDEBUG
+#include <assert.h>
+
+#include "OMX.h"
+#include "pv_omxcore.h"
+
+#include <binder/IMemory.h>
+
+#include <OMX_Component.h>
+
+namespace android {
+
+class NodeMeta {
+public:
+ NodeMeta(OMX *owner)
+ : mOwner(owner),
+ mHandle(NULL) {
+ }
+
+ OMX *owner() const {
+ return mOwner;
+ }
+
+ void setHandle(OMX_HANDLETYPE handle) {
+ assert(mHandle == NULL);
+ mHandle = handle;
+ }
+
+ OMX_HANDLETYPE handle() const {
+ return mHandle;
+ }
+
+ void setObserver(const sp<IOMXObserver> &observer) {
+ mObserver = observer;
+ }
+
+ sp<IOMXObserver> observer() {
+ return mObserver;
+ }
+
+private:
+ OMX *mOwner;
+ OMX_HANDLETYPE mHandle;
+ sp<IOMXObserver> mObserver;
+
+ NodeMeta(const NodeMeta &);
+ NodeMeta &operator=(const NodeMeta &);
+};
+
+class BufferMeta {
+public:
+ BufferMeta(OMX *owner, const sp<IMemory> &mem, bool is_backup = false)
+ : mOwner(owner),
+ mMem(mem),
+ mIsBackup(is_backup) {
+ }
+
+ BufferMeta(OMX *owner, size_t size)
+ : mOwner(owner),
+ mSize(size),
+ mIsBackup(false) {
+ }
+
+ void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
+ if (!mIsBackup) {
+ return;
+ }
+
+ memcpy((OMX_U8 *)mMem->pointer() + header->nOffset,
+ header->pBuffer + header->nOffset,
+ header->nFilledLen);
+ }
+
+ void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) {
+ if (!mIsBackup) {
+ return;
+ }
+
+ memcpy(header->pBuffer + header->nOffset,
+ (const OMX_U8 *)mMem->pointer() + header->nOffset,
+ header->nFilledLen);
+ }
+
+private:
+ OMX *mOwner;
+ sp<IMemory> mMem;
+ size_t mSize;
+ bool mIsBackup;
+
+ BufferMeta(const BufferMeta &);
+ BufferMeta &operator=(const BufferMeta &);
+};
+
+// static
+OMX_CALLBACKTYPE OMX::kCallbacks = {
+ &OnEvent, &OnEmptyBufferDone, &OnFillBufferDone
+};
+
+// static
+OMX_ERRORTYPE OMX::OnEvent(
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData) {
+ NodeMeta *meta = static_cast<NodeMeta *>(pAppData);
+ return meta->owner()->OnEvent(meta, eEvent, nData1, nData2, pEventData);
+}
+
+// static
+OMX_ERRORTYPE OMX::OnEmptyBufferDone(
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) {
+ NodeMeta *meta = static_cast<NodeMeta *>(pAppData);
+ return meta->owner()->OnEmptyBufferDone(meta, pBuffer);
+}
+
+// static
+OMX_ERRORTYPE OMX::OnFillBufferDone(
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) {
+ NodeMeta *meta = static_cast<NodeMeta *>(pAppData);
+ return meta->owner()->OnFillBufferDone(meta, pBuffer);
+}
+
+OMX::OMX()
+#if IOMX_USES_SOCKETS
+ : mSock(-1)
+#endif
+{
+}
+
+OMX::~OMX() {
+#if IOMX_USES_SOCKETS
+ assert(mSock < 0);
+#endif
+}
+
+#if IOMX_USES_SOCKETS
+status_t OMX::connect(int *sd) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mSock >= 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ int sockets[2];
+ if (socketpair(AF_UNIX, SOCK_DGRAM, 0, sockets) < 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ mSock = sockets[0];
+ *sd = sockets[1];
+
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ int err = pthread_create(&mThread, &attr, ThreadWrapper, this);
+ assert(err == 0);
+
+ pthread_attr_destroy(&attr);
+
+ return OK;
+}
+
+// static
+void *OMX::ThreadWrapper(void *me) {
+ ((OMX *)me)->threadEntry();
+
+ return NULL;
+}
+
+void OMX::threadEntry() {
+ bool done = false;
+ while (!done) {
+ omx_message msg;
+ ssize_t n = recv(mSock, &msg, sizeof(msg), 0);
+
+ if (n <= 0) {
+ break;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+
+ switch (msg.type) {
+ case omx_message::FILL_BUFFER:
+ {
+ OMX_BUFFERHEADERTYPE *header =
+ static_cast<OMX_BUFFERHEADERTYPE *>(
+ msg.u.buffer_data.buffer);
+
+ header->nFilledLen = 0;
+ header->nOffset = 0;
+ header->hMarkTargetComponent = NULL;
+ header->nFlags = 0;
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(
+ msg.u.buffer_data.node);
+
+ LOGV("FillThisBuffer buffer=%p", header);
+
+ OMX_ERRORTYPE err =
+ OMX_FillThisBuffer(node_meta->handle(), header);
+ assert(err == OMX_ErrorNone);
+ break;
+ }
+
+ case omx_message::EMPTY_BUFFER:
+ {
+ OMX_BUFFERHEADERTYPE *header =
+ static_cast<OMX_BUFFERHEADERTYPE *>(
+ msg.u.extended_buffer_data.buffer);
+
+ header->nFilledLen = msg.u.extended_buffer_data.range_length;
+ header->nOffset = msg.u.extended_buffer_data.range_offset;
+ header->hMarkTargetComponent = NULL;
+ header->nFlags = msg.u.extended_buffer_data.flags;
+ header->nTimeStamp = msg.u.extended_buffer_data.timestamp;
+
+ BufferMeta *buffer_meta =
+ static_cast<BufferMeta *>(header->pAppPrivate);
+ buffer_meta->CopyToOMX(header);
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(
+ msg.u.extended_buffer_data.node);
+
+ LOGV("EmptyThisBuffer buffer=%p", header);
+
+ OMX_ERRORTYPE err =
+ OMX_EmptyThisBuffer(node_meta->handle(), header);
+ assert(err == OMX_ErrorNone);
+ break;
+ }
+
+ case omx_message::SEND_COMMAND:
+ {
+ NodeMeta *node_meta = static_cast<NodeMeta *>(
+ msg.u.send_command_data.node);
+
+ OMX_ERRORTYPE err =
+ OMX_SendCommand(
+ node_meta->handle(), msg.u.send_command_data.cmd,
+ msg.u.send_command_data.param, NULL);
+ assert(err == OMX_ErrorNone);
+ break;
+ }
+
+ case omx_message::DISCONNECT:
+ {
+ omx_message msg;
+ msg.type = omx_message::DISCONNECTED;
+ ssize_t n = send(mSock, &msg, sizeof(msg), 0);
+ assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
+ done = true;
+ break;
+ }
+
+ default:
+ LOGE("received unknown omx_message type %d", msg.type);
+ break;
+ }
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ close(mSock);
+ mSock = -1;
+}
+#endif
+
+status_t OMX::list_nodes(List<String8> *list) {
+ OMX_MasterInit(); // XXX Put this somewhere else.
+
+ list->clear();
+
+ OMX_U32 index = 0;
+ char componentName[256];
+ while (OMX_MasterComponentNameEnum(componentName, sizeof(componentName), index)
+ == OMX_ErrorNone) {
+ list->push_back(String8(componentName));
+
+ ++index;
+ }
+
+ return OK;
+}
+
+status_t OMX::allocate_node(const char *name, node_id *node) {
+ Mutex::Autolock autoLock(mLock);
+
+ *node = 0;
+
+ OMX_MasterInit(); // XXX Put this somewhere else.
+
+ NodeMeta *meta = new NodeMeta(this);
+
+ OMX_HANDLETYPE handle;
+ OMX_ERRORTYPE err = OMX_MasterGetHandle(
+ &handle, const_cast<char *>(name), meta, &kCallbacks);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("FAILED to allocate omx component '%s'", name);
+
+ delete meta;
+ meta = NULL;
+
+ return UNKNOWN_ERROR;
+ }
+
+ meta->setHandle(handle);
+
+ *node = meta;
+
+ return OK;
+}
+
+status_t OMX::free_node(node_id node) {
+ Mutex::Autolock autoLock(mLock);
+
+ NodeMeta *meta = static_cast<NodeMeta *>(node);
+
+ OMX_ERRORTYPE err = OMX_MasterFreeHandle(meta->handle());
+
+ delete meta;
+ meta = NULL;
+
+ return (err != OMX_ErrorNone) ? UNKNOWN_ERROR : OK;
+}
+
+status_t OMX::send_command(
+ node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) {
+ Mutex::Autolock autoLock(mLock);
+
+#if IOMX_USES_SOCKETS
+ if (mSock < 0) {
+ return UNKNOWN_ERROR;
+ }
+#endif
+
+ NodeMeta *meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err = OMX_SendCommand(meta->handle(), cmd, param, NULL);
+
+ return (err != OMX_ErrorNone) ? UNKNOWN_ERROR : OK;
+}
+
+status_t OMX::get_parameter(
+ node_id node, OMX_INDEXTYPE index,
+ void *params, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ NodeMeta *meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err = OMX_GetParameter(meta->handle(), index, params);
+
+ return (err != OMX_ErrorNone) ? UNKNOWN_ERROR : OK;
+}
+
+status_t OMX::set_parameter(
+ node_id node, OMX_INDEXTYPE index,
+ const void *params, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ NodeMeta *meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err =
+ OMX_SetParameter(meta->handle(), index, const_cast<void *>(params));
+
+ return (err != OMX_ErrorNone) ? UNKNOWN_ERROR : OK;
+}
+
+status_t OMX::use_buffer(
+ node_id node, OMX_U32 port_index, const sp<IMemory> &params,
+ buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ BufferMeta *buffer_meta = new BufferMeta(this, params);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err =
+ OMX_UseBuffer(node_meta->handle(), &header, port_index, buffer_meta,
+ params->size(), static_cast<OMX_U8 *>(params->pointer()));
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+
+ delete buffer_meta;
+ buffer_meta = NULL;
+
+ *buffer = 0;
+ return UNKNOWN_ERROR;
+ }
+
+ *buffer = header;
+
+ return OK;
+}
+
+status_t OMX::allocate_buffer(
+ node_id node, OMX_U32 port_index, size_t size,
+ buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ BufferMeta *buffer_meta = new BufferMeta(this, size);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err =
+ OMX_AllocateBuffer(node_meta->handle(), &header, port_index,
+ buffer_meta, size);
+
+ if (err != OMX_ErrorNone) {
+ delete buffer_meta;
+ buffer_meta = NULL;
+
+ *buffer = 0;
+ return UNKNOWN_ERROR;
+ }
+
+ *buffer = header;
+
+ return OK;
+}
+
+status_t OMX::allocate_buffer_with_backup(
+ node_id node, OMX_U32 port_index, const sp<IMemory> &params,
+ buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ BufferMeta *buffer_meta = new BufferMeta(this, params, true);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err =
+ OMX_AllocateBuffer(
+ node_meta->handle(), &header, port_index, buffer_meta,
+ params->size());
+
+ if (err != OMX_ErrorNone) {
+ delete buffer_meta;
+ buffer_meta = NULL;
+
+ *buffer = 0;
+ return UNKNOWN_ERROR;
+ }
+
+ *buffer = header;
+
+ return OK;
+}
+
+status_t OMX::free_buffer(node_id node, OMX_U32 port_index, buffer_id buffer) {
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
+ BufferMeta *buffer_meta = static_cast<BufferMeta *>(header->pAppPrivate);
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+ OMX_ERRORTYPE err =
+ OMX_FreeBuffer(node_meta->handle(), port_index, header);
+
+ delete buffer_meta;
+ buffer_meta = NULL;
+
+ return (err != OMX_ErrorNone) ? UNKNOWN_ERROR : OK;
+}
+
+OMX_ERRORTYPE OMX::OnEvent(
+ NodeMeta *meta,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData) {
+ LOGV("OnEvent(%d, %ld, %ld)", eEvent, nData1, nData2);
+
+ omx_message msg;
+ msg.type = omx_message::EVENT;
+ msg.u.event_data.node = meta;
+ msg.u.event_data.event = eEvent;
+ msg.u.event_data.data1 = nData1;
+ msg.u.event_data.data2 = nData2;
+
+#if !IOMX_USES_SOCKETS
+ sp<IOMXObserver> observer = meta->observer();
+ if (observer.get() != NULL) {
+ observer->on_message(msg);
+ }
+#else
+ assert(mSock >= 0);
+
+ ssize_t n = send(mSock, &msg, sizeof(msg), 0);
+ assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
+#endif
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OMX::OnEmptyBufferDone(
+ NodeMeta *meta, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
+ LOGV("OnEmptyBufferDone buffer=%p", pBuffer);
+
+ omx_message msg;
+ msg.type = omx_message::EMPTY_BUFFER_DONE;
+ msg.u.buffer_data.node = meta;
+ msg.u.buffer_data.buffer = pBuffer;
+
+#if !IOMX_USES_SOCKETS
+ sp<IOMXObserver> observer = meta->observer();
+ if (observer.get() != NULL) {
+ observer->on_message(msg);
+ }
+#else
+ assert(mSock >= 0);
+ ssize_t n = send(mSock, &msg, sizeof(msg), 0);
+ assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
+#endif
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OMX::OnFillBufferDone(
+ NodeMeta *meta, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
+ LOGV("OnFillBufferDone buffer=%p", pBuffer);
+ BufferMeta *buffer_meta = static_cast<BufferMeta *>(pBuffer->pAppPrivate);
+ buffer_meta->CopyFromOMX(pBuffer);
+
+ omx_message msg;
+ msg.type = omx_message::FILL_BUFFER_DONE;
+ msg.u.extended_buffer_data.node = meta;
+ msg.u.extended_buffer_data.buffer = pBuffer;
+ msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;
+ msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
+ msg.u.extended_buffer_data.flags = pBuffer->nFlags;
+ msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
+ msg.u.extended_buffer_data.platform_private = pBuffer->pPlatformPrivate;
+
+#if !IOMX_USES_SOCKETS
+ sp<IOMXObserver> observer = meta->observer();
+ if (observer.get() != NULL) {
+ observer->on_message(msg);
+ }
+#else
+ assert(mSock >= 0);
+
+ ssize_t n = send(mSock, &msg, sizeof(msg), 0);
+ assert(n > 0 && static_cast<size_t>(n) == sizeof(msg));
+#endif
+
+ return OMX_ErrorNone;
+}
+
+#if !IOMX_USES_SOCKETS
+status_t OMX::observe_node(
+ node_id node, const sp<IOMXObserver> &observer) {
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+
+ node_meta->setObserver(observer);
+
+ return OK;
+}
+
+void OMX::fill_buffer(node_id node, buffer_id buffer) {
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
+ header->nFilledLen = 0;
+ header->nOffset = 0;
+ header->nFlags = 0;
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+
+ OMX_ERRORTYPE err =
+ OMX_FillThisBuffer(node_meta->handle(), header);
+ assert(err == OMX_ErrorNone);
+}
+
+void OMX::empty_buffer(
+ node_id node,
+ buffer_id buffer,
+ OMX_U32 range_offset, OMX_U32 range_length,
+ OMX_U32 flags, OMX_TICKS timestamp) {
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
+ header->nFilledLen = range_length;
+ header->nOffset = range_offset;
+ header->nFlags = flags;
+ header->nTimeStamp = timestamp;
+
+ BufferMeta *buffer_meta =
+ static_cast<BufferMeta *>(header->pAppPrivate);
+ buffer_meta->CopyToOMX(header);
+
+ NodeMeta *node_meta = static_cast<NodeMeta *>(node);
+
+ OMX_ERRORTYPE err =
+ OMX_EmptyThisBuffer(node_meta->handle(), header);
+ assert(err == OMX_ErrorNone);
+}
+#endif
+
+} // namespace android
+
diff --git a/media/libstagefright/omx/OMX.h b/media/libstagefright/omx/OMX.h
new file mode 100644
index 0000000..ed4e5dd
--- /dev/null
+++ b/media/libstagefright/omx/OMX.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_OMX_H_
+#define ANDROID_OMX_H_
+
+#include <pthread.h>
+
+#include <media/IOMX.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class NodeMeta;
+
+class OMX : public BnOMX {
+public:
+ OMX();
+ virtual ~OMX();
+
+#if IOMX_USES_SOCKETS
+ virtual status_t connect(int *sd);
+#endif
+
+ virtual status_t list_nodes(List<String8> *list);
+
+ virtual status_t allocate_node(const char *name, node_id *node);
+ virtual status_t free_node(node_id node);
+
+ virtual status_t send_command(
+ node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param);
+
+ virtual status_t get_parameter(
+ node_id node, OMX_INDEXTYPE index,
+ void *params, size_t size);
+
+ virtual status_t set_parameter(
+ node_id node, OMX_INDEXTYPE index,
+ const void *params, size_t size);
+
+ virtual status_t use_buffer(
+ node_id node, OMX_U32 port_index, const sp<IMemory> &params,
+ buffer_id *buffer);
+
+ virtual status_t allocate_buffer(
+ node_id node, OMX_U32 port_index, size_t size,
+ buffer_id *buffer);
+
+ virtual status_t allocate_buffer_with_backup(
+ node_id node, OMX_U32 port_index, const sp<IMemory> &params,
+ buffer_id *buffer);
+
+ virtual status_t free_buffer(
+ node_id node, OMX_U32 port_index, buffer_id buffer);
+
+#if !IOMX_USES_SOCKETS
+ virtual status_t observe_node(
+ node_id node, const sp<IOMXObserver> &observer);
+
+ virtual void fill_buffer(node_id node, buffer_id buffer);
+
+ virtual void empty_buffer(
+ node_id node,
+ buffer_id buffer,
+ OMX_U32 range_offset, OMX_U32 range_length,
+ OMX_U32 flags, OMX_TICKS timestamp);
+#endif
+
+private:
+ static OMX_CALLBACKTYPE kCallbacks;
+
+#if IOMX_USES_SOCKETS
+ int mSock;
+ pthread_t mThread;
+
+ static void *ThreadWrapper(void *me);
+ void threadEntry();
+#endif
+
+ Mutex mLock;
+
+ static OMX_ERRORTYPE OnEvent(
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData);
+
+ static OMX_ERRORTYPE OnEmptyBufferDone(
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
+
+ static OMX_ERRORTYPE OnFillBufferDone(
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
+
+ OMX_ERRORTYPE OnEvent(
+ NodeMeta *meta,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData);
+
+ OMX_ERRORTYPE OnEmptyBufferDone(
+ NodeMeta *meta, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+
+ OMX_ERRORTYPE OnFillBufferDone(
+ NodeMeta *meta, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+
+ OMX(const OMX &);
+ OMX &operator=(const OMX &);
+};
+
+} // namespace android
+
+#endif // ANDROID_OMX_H_
diff --git a/media/libstagefright/string.cpp b/media/libstagefright/string.cpp
new file mode 100644
index 0000000..5b16784
--- /dev/null
+++ b/media/libstagefright/string.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/string.h>
+
+namespace android {
+
+// static
+string::size_type string::npos = (string::size_type)-1;
+
+string::string() {
+}
+
+string::string(const char *s, size_t length)
+ : mString(s, length) {
+}
+
+string::string(const string &from, size_type start, size_type length)
+ : mString(from.c_str() + start, length) {
+}
+
+string::string(const char *s)
+ : mString(s) {
+}
+
+const char *string::c_str() const {
+ return mString.string();
+}
+
+string::size_type string::size() const {
+ return mString.length();
+}
+
+void string::clear() {
+ mString = String8();
+}
+
+string::size_type string::find(char c) const {
+ char s[2];
+ s[0] = c;
+ s[1] = '\0';
+
+ ssize_t index = mString.find(s);
+
+ return index < 0 ? npos : (size_type)index;
+}
+
+bool string::operator<(const string &other) const {
+ return mString < other.mString;
+}
+
+bool string::operator==(const string &other) const {
+ return mString == other.mString;
+}
+
+string &string::operator+=(char c) {
+ mString.append(&c, 1);
+
+ return *this;
+}
+
+void string::erase(size_t from, size_t length) {
+ String8 s(mString.string(), from);
+ s.append(mString.string() + from + length);
+
+ mString = s;
+}
+
+} // namespace android
+
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index c681698..a92cea8 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -8,7 +8,8 @@ LOCAL_SHARED_LIBRARIES := \
libaudioflinger \
libcameraservice \
libmediaplayerservice \
- libutils
+ libutils \
+ libbinder
base := $(LOCAL_PATH)/../..
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 6954b63..7094cfa 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -20,14 +20,15 @@
#include <unistd.h>
#include <grp.h>
-#include <utils/IPCThreadState.h>
-#include <utils/ProcessState.h>
-#include <utils/IServiceManager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
#include <utils/Log.h>
#include <AudioFlinger.h>
#include <CameraService.h>
#include <MediaPlayerService.h>
+#include <AudioPolicyService.h>
#include <private/android_filesystem_config.h>
using namespace android;
@@ -40,6 +41,7 @@ int main(int argc, char** argv)
AudioFlinger::instantiate();
MediaPlayerService::instantiate();
CameraService::instantiate();
+ AudioPolicyService::instantiate();
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
}
diff --git a/media/sdutils/sdutil.cpp b/media/sdutils/sdutil.cpp
index 06120f5..fe11878 100644
--- a/media/sdutils/sdutil.cpp
+++ b/media/sdutils/sdutil.cpp
@@ -15,8 +15,8 @@
*/
#include <hardware_legacy/IMountService.h>
-#include <utils/BpBinder.h>
-#include <utils/IServiceManager.h>
+#include <binder/BpBinder.h>
+#include <binder/IServiceManager.h>
#include <stdio.h>
#include <stdlib.h>
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java
index 6edc2cc..2a4e9a0 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java
@@ -23,6 +23,7 @@ import com.android.mediaframeworktest.functional.MediaMimeTest;
import com.android.mediaframeworktest.functional.MediaPlayerApiTest;
import com.android.mediaframeworktest.functional.MediaRecorderTest;
import com.android.mediaframeworktest.functional.SimTonesTest;
+import com.android.mediaframeworktest.functional.MediaPlayerInvokeTest;
import junit.framework.TestSuite;
@@ -32,7 +33,7 @@ import android.test.InstrumentationTestSuite;
/**
* Instrumentation Test Runner for all MediaPlayer tests.
- *
+ *
* Running all tests:
*
* adb shell am instrument \
@@ -52,6 +53,7 @@ public class MediaFrameworkTestRunner extends InstrumentationTestRunner {
suite.addTestSuite(MediaRecorderTest.class);
suite.addTestSuite(MediaAudioTrackTest.class);
suite.addTestSuite(MediaMimeTest.class);
+ suite.addTestSuite(MediaPlayerInvokeTest.class);
return suite;
}
@@ -60,5 +62,3 @@ public class MediaFrameworkTestRunner extends InstrumentationTestRunner {
return MediaFrameworkTestRunner.class.getClassLoader();
}
}
-
-
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java
index 81d59da..a203adc 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java
@@ -24,16 +24,16 @@ import junit.framework.TestSuite;
/**
* Instrumentation Test Runner for all media framework unit tests.
- *
+ *
* Make sure that MediaFrameworkUnitTestRunner has been added to
* AndroidManifest.xml file, and then "make -j4 mediaframeworktest; adb sync"
* to build and upload mediaframeworktest to the phone or emulator.
- *
+ *
* Example on running all unit tests for a single class:
* adb shell am instrument -e class \
- * com.android.mediaframeworktest.unit.MediaMetadataRetrieverUnitTest \
+ * com.android.mediaframeworktest.unit.MediaMetadataRetrieverUnitTest \
* -w com.android.mediaframeworktest/.MediaFrameworkUnitTestRunner
- *
+ *
* Example on running all unit tests for the media framework:
* adb shell am instrument \
* -w com.android.mediaframeworktest/.MediaFrameworkUnitTestRunner
@@ -54,12 +54,12 @@ public class MediaFrameworkUnitTestRunner extends InstrumentationTestRunner {
public ClassLoader getLoader() {
return MediaFrameworkUnitTestRunner.class.getClassLoader();
}
-
+
// Running all unit tests checking the state machine may be time-consuming.
private void addMediaMetadataRetrieverStateUnitTests(TestSuite suite) {
suite.addTestSuite(MediaMetadataRetrieverTest.class);
}
-
+
// Running all unit tests checking the state machine may be time-consuming.
private void addMediaRecorderStateUnitTests(TestSuite suite) {
suite.addTestSuite(MediaRecorderPrepareStateUnitTest.class);
@@ -87,5 +87,6 @@ public class MediaFrameworkUnitTestRunner extends InstrumentationTestRunner {
suite.addTestSuite(MediaPlayerSetLoopingStateUnitTest.class);
suite.addTestSuite(MediaPlayerSetAudioStreamTypeStateUnitTest.class);
suite.addTestSuite(MediaPlayerSetVolumeStateUnitTest.class);
+ suite.addTestSuite(MediaPlayerMetadataParserTest.class);
}
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
index aefedc3..cea3a5a 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
@@ -140,7 +140,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
- AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
+ AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_INITIALIZED);
assertTrue("testConstructorMono16MusicStream: " + res.mResultLog, res.mResult);
@@ -153,7 +153,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
- AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
+ AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_INITIALIZED);
assertTrue("testConstructorStereo16MusicStream: " + res.mResultLog, res.mResult);
@@ -166,7 +166,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
- AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
+ AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_NO_STATIC_DATA);
assertTrue("testConstructorMono16MusicStatic: " + res.mResultLog, res.mResult);
@@ -179,7 +179,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
- AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
+ AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_NO_STATIC_DATA);
assertTrue("testConstructorStereo16MusicStatic: " + res.mResultLog, res.mResult);
@@ -196,7 +196,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
- AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_8BIT,
AudioTrack.STATE_INITIALIZED);
assertTrue("testConstructorMono8MusicStream: " + res.mResultLog, res.mResult);
@@ -208,7 +208,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
- AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_8BIT,
AudioTrack.STATE_INITIALIZED);
assertTrue("testConstructorStereo8MusicStream: " + res.mResultLog, res.mResult);
@@ -220,7 +220,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
- AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_8BIT,
AudioTrack.STATE_NO_STATIC_DATA);
assertTrue("testConstructorMono8MusicStatic: " + res.mResultLog, res.mResult);
@@ -232,7 +232,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
- AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_8BIT,
AudioTrack.STATE_NO_STATIC_DATA);
assertTrue("testConstructorStereo8MusicStatic: " + res.mResultLog, res.mResult);
@@ -248,15 +248,15 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
public void testConstructorStreamType() throws Exception {
// constants for test
final int TYPE_TEST_SR = 22050;
- final int TYPE_TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TYPE_TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TYPE_TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TYPE_TEST_MODE = AudioTrack.MODE_STREAM;
final int[] STREAM_TYPES = { AudioManager.STREAM_ALARM, AudioManager.STREAM_BLUETOOTH_SCO,
AudioManager.STREAM_MUSIC, AudioManager.STREAM_NOTIFICATION,
AudioManager.STREAM_RING, AudioManager.STREAM_SYSTEM,
- AudioManager.STREAM_VOICE_CALL };
+ AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_DTMF, };
final String[] STREAM_NAMES = { "STREAM_ALARM", "STREAM_BLUETOOTH_SCO", "STREAM_MUSIC",
- "STREAM_NOTIFICATION", "STREAM_RING", "STREAM_SYSTEM", "STREAM_VOICE_CALL" };
+ "STREAM_NOTIFICATION", "STREAM_RING", "STREAM_SYSTEM", "STREAM_VOICE_CALL", "STREAM_DTMF" };
boolean localTestRes = true;
AudioTrack track = null;
@@ -303,7 +303,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionAfterInit";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -324,7 +324,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionIncrease";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -352,7 +352,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionAfterFlush";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -382,7 +382,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionAfterStop";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -413,7 +413,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionAfterPause";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -448,7 +448,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetStereoVolumeMax";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -474,7 +474,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetStereoVolumeMin";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -500,7 +500,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetStereoVolumeMid";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -526,7 +526,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackRate";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -552,7 +552,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackRateZero";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -574,7 +574,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackRateTwiceOutputSR";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -601,7 +601,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetGetPlaybackRate";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -628,7 +628,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackRateUninit";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STATIC;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -655,7 +655,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackHeadPositionPlaying";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -682,7 +682,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackHeadPositionStopped";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -710,7 +710,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackHeadPositionPaused";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -738,7 +738,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetPlaybackHeadPositionTooFar";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -770,7 +770,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetLoopPointsStream";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -794,7 +794,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetLoopPointsStartAfterEnd";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STATIC;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -818,7 +818,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetLoopPointsSuccess";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STATIC;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -842,7 +842,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetLoopPointsLoopTooLong";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STATIC;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -868,7 +868,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetLoopPointsStartTooFar";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STATIC;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -896,7 +896,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testSetLoopPointsEndTooFar";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STATIC;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -929,7 +929,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteByteOffsetTooBig";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -953,7 +953,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteShortOffsetTooBig";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -977,7 +977,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteByteSizeTooBig";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1001,7 +1001,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteShortSizeTooBig";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1025,7 +1025,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteByteNegativeOffset";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1049,7 +1049,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteShortNegativeOffset";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1073,7 +1073,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteByteNegativeSize";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1097,7 +1097,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteShortNegativeSize";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1121,7 +1121,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteByte";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1145,7 +1145,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteShort";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1169,7 +1169,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteByte8bit";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1193,7 +1193,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constants for test
final String TEST_NAME = "testWriteShort8bit";
final int TEST_SR = 22050;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1221,7 +1221,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constant for test
final String TEST_NAME = "testGetMinBufferSizeTooLowSR";
final int TEST_SR = 3999;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
@@ -1238,7 +1238,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// constant for testg
final String TEST_NAME = "testGetMinBufferSizeTooHighSR";
final int TEST_SR = 48001;
- final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java
new file mode 100644
index 0000000..ab8b311
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.functional;
+
+import com.android.mediaframeworktest.MediaFrameworkTest;
+import com.android.mediaframeworktest.MediaNames;
+
+import android.test.ActivityInstrumentationTestCase2;
+import android.util.Log;
+import android.test.suitebuilder.annotation.LargeTest;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.Suppress;
+
+import android.media.MediaPlayer;
+import android.os.Parcel;
+
+import java.util.Calendar;
+import java.util.Random;
+
+// Tests for the invoke method in the MediaPlayer.
+public class MediaPlayerInvokeTest extends ActivityInstrumentationTestCase2<MediaFrameworkTest> {
+ private static final String TAG = "MediaPlayerInvokeTest";
+ private MediaPlayer mPlayer;
+ private Random rnd;
+
+ public MediaPlayerInvokeTest() {
+ super("com.android.mediaframeworktest", MediaFrameworkTest.class);
+ rnd = new Random(Calendar.getInstance().getTimeInMillis());
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ mPlayer = new MediaPlayer();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ mPlayer.release();
+ super.tearDown();
+ }
+
+ // Generate a random number, sends it to the ping test player.
+ @MediumTest
+ public void testPing() throws Exception {
+ mPlayer.setDataSource("test:invoke_mock_media_player.so?url=ping");
+
+ Parcel request = mPlayer.newRequest();
+ Parcel reply = Parcel.obtain();
+
+ int val = rnd.nextInt();
+ request.writeInt(val);
+ assertEquals(0, mPlayer.invoke(request, reply));
+ assertEquals(val, reply.readInt());
+ }
+}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
index 84058f5..4adb04e 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
@@ -47,7 +47,7 @@ import android.media.MediaMetadataRetriever;
*/
public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<MediaFrameworkTest> {
- private String TAG = "MediaFrameworkPerformance";
+ private String TAG = "MediaPlayerPerformance";
private SQLiteDatabase mDB;
private SurfaceHolder mSurfaceHolder = null;
@@ -76,9 +76,11 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<Medi
public void createDB() {
mDB = SQLiteDatabase.openOrCreateDatabase("/sdcard/perf.db", null);
- mDB.execSQL("CREATE TABLE perfdata (_id INTEGER PRIMARY KEY," +
+ mDB.execSQL("CREATE TABLE IF NOT EXISTS perfdata (_id INTEGER PRIMARY KEY," +
"file TEXT," + "setdatatime LONG," + "preparetime LONG," +
"playtime LONG" + ");");
+ //clean the table before adding new data
+ mDB.execSQL("DELETE FROM perfdata");
}
public void audioPlaybackStartupTime(String[] testFile) {
@@ -137,6 +139,10 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<Medi
audioPlaybackStartupTime(MediaNames.MP3FILES);
audioPlaybackStartupTime(MediaNames.AACFILES);
+ //close the database after all transactions
+ if (mDB.isOpen()) {
+ mDB.close();
+ }
}
public void wmametadatautility(String[] testFile) {
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java
new file mode 100644
index 0000000..38f598a
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java
@@ -0,0 +1,432 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.unit;
+import android.media.Metadata;
+import android.os.Parcel;
+import android.test.AndroidTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+
+import java.util.Calendar;
+import java.util.Date;
+
+/*
+ * Check the Java layer that parses serialized metadata in Parcel
+ * works as expected.
+ *
+ */
+
+public class MediaPlayerMetadataParserTest extends AndroidTestCase {
+ private static final String TAG = "MediaPlayerMetadataTest";
+ private static final int kMarker = 0x4d455441; // 'M' 'E' 'T' 'A'
+ private static final int kHeaderSize = 8;
+
+ private Metadata mMetadata = null;
+ private Parcel mParcel = null;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ mMetadata = new Metadata();
+ mParcel = Parcel.obtain();
+
+ resetParcel();
+ }
+
+ // Check parsing of the parcel fails. Make sure the parser rewind
+ // the parcel properly.
+ private void assertParseFail() throws Exception {
+ mParcel.setDataPosition(0);
+ assertFalse(mMetadata.parse(mParcel));
+ assertEquals(0, mParcel.dataPosition());
+ }
+
+ // Check parsing of the parcel is successful.
+ private void assertParse() throws Exception {
+ mParcel.setDataPosition(0);
+ assertTrue(mMetadata.parse(mParcel));
+ }
+
+ // Write the number of bytes from the start of the parcel to the
+ // current position at the beginning of the parcel (offset 0).
+ private void adjustSize() {
+ adjustSize(0);
+ }
+
+ // Write the number of bytes from the offset to the current
+ // position at position pointed by offset.
+ private void adjustSize(int offset) {
+ final int pos = mParcel.dataPosition();
+
+ mParcel.setDataPosition(offset);
+ mParcel.writeInt(pos - offset);
+ mParcel.setDataPosition(pos);
+ }
+
+ // Rewind the parcel and insert the header.
+ private void resetParcel() {
+ mParcel.setDataPosition(0);
+ // Most tests will use a properly formed parcel with a size
+ // and the meta marker so we add them by default.
+ mParcel.writeInt(-1); // Placeholder for the size
+ mParcel.writeInt(kMarker);
+ }
+
+ // ----------------------------------------------------------------------
+ // START OF THE TESTS
+
+
+ // There should be at least 8 bytes in the parcel, 4 for the size
+ // and 4 for the 'M' 'E' 'T' 'A' marker.
+ @SmallTest
+ public void testMissingSizeAndMarker() throws Exception {
+ for (int i = 0; i < kHeaderSize; ++i) {
+ mParcel.setDataPosition(0);
+ mParcel.setDataSize(i);
+
+ assertEquals(i, mParcel.dataAvail());
+ assertParseFail();
+ }
+ }
+
+ // There should be at least 'size' bytes in the parcel.
+ @SmallTest
+ public void testMissingData() throws Exception {
+ final int size = 20;
+
+ mParcel.writeInt(size);
+ mParcel.setDataSize(size - 1);
+ assertParseFail();
+ }
+
+ // Empty parcel is fine
+ @SmallTest
+ public void testEmptyIsOk() throws Exception {
+ adjustSize();
+ assertParse();
+ }
+
+ // ----------------------------------------------------------------------
+ // RECORDS
+ // ----------------------------------------------------------------------
+
+ // A record header should be at least 12 bytes long
+ @SmallTest
+ public void testRecordMissingId() throws Exception {
+ mParcel.writeInt(13); // record length
+ // misses metadata id and metadata type.
+ adjustSize();
+ assertParseFail();
+ }
+
+ @SmallTest
+ public void testRecordMissingType() throws Exception {
+ mParcel.writeInt(13); // record length lies
+ mParcel.writeInt(Metadata.TITLE);
+ // misses metadata type
+ adjustSize();
+ assertParseFail();
+ }
+
+ @SmallTest
+ public void testRecordWithZeroPayload() throws Exception {
+ mParcel.writeInt(0);
+ adjustSize();
+ assertParseFail();
+ }
+
+ // A record cannot be empty.
+ @SmallTest
+ public void testRecordMissingPayload() throws Exception {
+ mParcel.writeInt(12);
+ mParcel.writeInt(Metadata.TITLE);
+ mParcel.writeInt(Metadata.STRING_VAL);
+ // misses payload
+ adjustSize();
+ assertParseFail();
+ }
+
+ // Check records can be found.
+ @SmallTest
+ public void testRecordsFound() throws Exception {
+ writeStringRecord(Metadata.TITLE, "a title");
+ writeStringRecord(Metadata.GENRE, "comedy");
+ writeStringRecord(Metadata.firstCustomId(), "custom");
+ adjustSize();
+ assertParse();
+ assertTrue(mMetadata.has(Metadata.TITLE));
+ assertTrue(mMetadata.has(Metadata.GENRE));
+ assertTrue(mMetadata.has(Metadata.firstCustomId()));
+ assertFalse(mMetadata.has(Metadata.DRM_CRIPPLED));
+ assertEquals(3, mMetadata.keySet().size());
+ }
+
+ // Detects bad metadata type
+ @SmallTest
+ public void testBadMetadataType() throws Exception {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(Metadata.TITLE);
+ mParcel.writeInt(0); // Invalid type.
+ mParcel.writeString("dummy");
+ adjustSize(start);
+
+ adjustSize();
+ assertParseFail();
+ }
+
+ // Check a Metadata instance can be reused, i.e the parse method
+ // wipes out the existing states/keys.
+ @SmallTest
+ public void testParseClearState() throws Exception {
+ writeStringRecord(Metadata.TITLE, "a title");
+ writeStringRecord(Metadata.GENRE, "comedy");
+ writeStringRecord(Metadata.firstCustomId(), "custom");
+ adjustSize();
+ assertParse();
+
+ resetParcel();
+ writeStringRecord(Metadata.MIME_TYPE, "audio/mpg");
+ adjustSize();
+ assertParse();
+
+ // Only the mime type metadata should be present.
+ assertEquals(1, mMetadata.keySet().size());
+ assertTrue(mMetadata.has(Metadata.MIME_TYPE));
+
+ assertFalse(mMetadata.has(Metadata.TITLE));
+ assertFalse(mMetadata.has(Metadata.GENRE));
+ assertFalse(mMetadata.has(Metadata.firstCustomId()));
+ }
+
+ // ----------------------------------------------------------------------
+ // GETTERS
+ // ----------------------------------------------------------------------
+
+ // getString
+ @SmallTest
+ public void testGetString() throws Exception {
+ writeStringRecord(Metadata.TITLE, "a title");
+ writeStringRecord(Metadata.GENRE, "comedy");
+ adjustSize();
+ assertParse();
+
+ assertEquals("a title", mMetadata.getString(Metadata.TITLE));
+ assertEquals("comedy", mMetadata.getString(Metadata.GENRE));
+ }
+
+ // get an empty string.
+ @SmallTest
+ public void testGetEmptyString() throws Exception {
+ writeStringRecord(Metadata.TITLE, "");
+ adjustSize();
+ assertParse();
+
+ assertEquals("", mMetadata.getString(Metadata.TITLE));
+ }
+
+ // get a string when a NULL value was in the parcel
+ @SmallTest
+ public void testGetNullString() throws Exception {
+ writeStringRecord(Metadata.TITLE, null);
+ adjustSize();
+ assertParse();
+
+ assertEquals(null, mMetadata.getString(Metadata.TITLE));
+ }
+
+ // get a string when an integer is actually present
+ @SmallTest
+ public void testWrongType() throws Exception {
+ writeIntRecord(Metadata.DURATION, 5);
+ adjustSize();
+ assertParse();
+
+ try {
+ mMetadata.getString(Metadata.DURATION);
+ } catch (IllegalStateException ise) {
+ return;
+ }
+ fail("Exception was not thrown");
+ }
+
+ // getInt
+ @SmallTest
+ public void testGetInt() throws Exception {
+ writeIntRecord(Metadata.CD_TRACK_NUM, 1);
+ adjustSize();
+ assertParse();
+
+ assertEquals(1, mMetadata.getInt(Metadata.CD_TRACK_NUM));
+ }
+
+ // getBoolean
+ @SmallTest
+ public void testGetBoolean() throws Exception {
+ writeBooleanRecord(Metadata.DRM_CRIPPLED, true);
+ adjustSize();
+ assertParse();
+
+ assertEquals(true, mMetadata.getBoolean(Metadata.DRM_CRIPPLED));
+ }
+
+ // getLong
+ @SmallTest
+ public void testGetLong() throws Exception {
+ writeLongRecord(Metadata.DURATION, 1L);
+ adjustSize();
+ assertParse();
+
+ assertEquals(1L, mMetadata.getLong(Metadata.DURATION));
+ }
+
+ // getDouble
+ @SmallTest
+ public void testGetDouble() throws Exception {
+ writeDoubleRecord(Metadata.VIDEO_FRAME_RATE, 29.97);
+ adjustSize();
+ assertParse();
+
+ assertEquals(29.97, mMetadata.getDouble(Metadata.VIDEO_FRAME_RATE));
+ }
+
+ // getByteArray
+ @SmallTest
+ public void testGetByteArray() throws Exception {
+ byte data[] = new byte[]{1,2,3,4,5};
+
+ writeByteArrayRecord(Metadata.ALBUM_ART, data);
+ adjustSize();
+ assertParse();
+
+ byte res[] = mMetadata.getByteArray(Metadata.ALBUM_ART);
+ for (int i = 0; i < data.length; ++i) {
+ assertEquals(data[i], res[i]);
+ }
+ }
+
+ // getDate
+ @SmallTest
+ public void testGetDate() throws Exception {
+ writeDateRecord(Metadata.DATE, 0, "PST");
+ adjustSize();
+ assertParse();
+
+ assertEquals(new Date(0), mMetadata.getDate(Metadata.DATE));
+ }
+
+ // getTimedText
+ @SmallTest
+ public void testGetTimedText() throws Exception {
+ Date now = Calendar.getInstance().getTime();
+ writeTimedTextRecord(Metadata.CAPTION, now.getTime(),
+ 10, "Some caption");
+ adjustSize();
+ assertParse();
+
+ Metadata.TimedText caption = mMetadata.getTimedText(Metadata.CAPTION);
+ assertEquals("" + now + "-" + 10 + ":Some caption", caption.toString());
+ }
+
+ // ----------------------------------------------------------------------
+ // HELPERS TO APPEND RECORDS
+ // ----------------------------------------------------------------------
+
+ // Insert a string record at the current position.
+ private void writeStringRecord(int metadataId, String val) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.STRING_VAL);
+ mParcel.writeString(val);
+ adjustSize(start);
+ }
+
+ // Insert an int record at the current position.
+ private void writeIntRecord(int metadataId, int val) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.INTEGER_VAL);
+ mParcel.writeInt(val);
+ adjustSize(start);
+ }
+
+ // Insert a boolean record at the current position.
+ private void writeBooleanRecord(int metadataId, boolean val) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.BOOLEAN_VAL);
+ mParcel.writeInt(val ? 1 : 0);
+ adjustSize(start);
+ }
+
+ // Insert a Long record at the current position.
+ private void writeLongRecord(int metadataId, long val) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.LONG_VAL);
+ mParcel.writeLong(val);
+ adjustSize(start);
+ }
+
+ // Insert a Double record at the current position.
+ private void writeDoubleRecord(int metadataId, double val) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.DOUBLE_VAL);
+ mParcel.writeDouble(val);
+ adjustSize(start);
+ }
+
+ // Insert a ByteArray record at the current position.
+ private void writeByteArrayRecord(int metadataId, byte[] val) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.BYTE_ARRAY_VAL);
+ mParcel.writeByteArray(val);
+ adjustSize(start);
+ }
+
+ // Insert a Date record at the current position.
+ private void writeDateRecord(int metadataId, long time, String tz) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.DATE_VAL);
+ mParcel.writeLong(time);
+ mParcel.writeString(tz);
+ adjustSize(start);
+ }
+
+ // Insert a TimedText record at the current position.
+ private void writeTimedTextRecord(int metadataId, long begin,
+ int duration, String text) {
+ final int start = mParcel.dataPosition();
+ mParcel.writeInt(-1); // Placeholder for the length
+ mParcel.writeInt(metadataId);
+ mParcel.writeInt(Metadata.TIMED_TEXT_VAL);
+ mParcel.writeLong(begin);
+ mParcel.writeInt(duration);
+ mParcel.writeString(text);
+ adjustSize(start);
+ }
+}
diff --git a/media/tests/players/Android.mk b/media/tests/players/Android.mk
new file mode 100644
index 0000000..eb50a51
--- /dev/null
+++ b/media/tests/players/Android.mk
@@ -0,0 +1,29 @@
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Source for the mock media player used by system tests.
+LOCAL_SRC_FILES:= invoke_mock_media_player.cpp
+
+LOCAL_SHARED_LIBRARIES:= \
+ libbinder \
+ libutils
+
+LOCAL_MODULE:= invoke_mock_media_player
+# Test-only module: built for test and eng builds, not user builds.
+LOCAL_MODULE_TAGS := test eng
+LOCAL_PRELINK_MODULE:= false
+
+# Built as a shared library so it can be loaded dynamically at test time.
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/tests/players/README b/media/tests/players/README
new file mode 100644
index 0000000..edf9bd6
--- /dev/null
+++ b/media/tests/players/README
@@ -0,0 +1,8 @@
+Native test players for system tests.
+
+For functional, system, and performance tests, a native test player can be used.
+This directory contains the sources of such players.
+The TestPlayerStub class loads any of them at runtime via the dynamic loader.
+
+
+
diff --git a/media/tests/players/invoke_mock_media_player.cpp b/media/tests/players/invoke_mock_media_player.cpp
new file mode 100644
index 0000000..77bb5b2
--- /dev/null
+++ b/media/tests/players/invoke_mock_media_player.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TestPlayerStub"
+#include "utils/Log.h"
+
+#include <string.h>
+
+#include <binder/Parcel.h>
+#include <media/MediaPlayerInterface.h>
+#include <utils/Errors.h>
+
+using android::INVALID_OPERATION;
+using android::ISurface;
+using android::MediaPlayerBase;
+using android::OK;
+using android::Parcel;
+using android::SortedVector;
+using android::TEST_PLAYER;
+using android::UNKNOWN_ERROR;
+using android::player_type;
+using android::sp;
+using android::status_t;
+
+// This file contains a test player that is loaded via the
+// TestPlayerStub class. The player contains various implementation
+// of the invoke method that java tests can use.
+
+namespace {
+const char *kPing = "ping";
+
+// Mock player for system tests.  Every MediaPlayerBase method is a trivial
+// stub; the interesting behavior lives in invoke(), which dispatches on the
+// test selected via the data-source "url".
+class Player: public MediaPlayerBase
+{
+ public:
+ // Which invoke() test to run; chosen in setDataSource().
+ enum TestType {TEST_UNKNOWN, PING};
+ // NOTE(review): mStatus and mTest are left uninitialized here, so calling
+ // invoke() before setDataSource() reads an indeterminate mTest — confirm
+ // callers always set a data source first.
+ Player() {}
+ virtual ~Player() {}
+
+ virtual status_t initCheck() {return OK;}
+ virtual bool hardwareOutput() {return true;}
+
+ // The url doubles as the test selector: a url beginning with "ping"
+ // selects the PING test, anything else selects TEST_UNKNOWN.
+ virtual status_t setDataSource(const char *url) {
+ LOGV("setDataSource %s", url);
+ mTest = TEST_UNKNOWN;
+ if (strncmp(url, kPing, strlen(kPing)) == 0) {
+ mTest = PING;
+ }
+ return OK;
+ }
+
+ // All remaining MediaPlayerBase entry points are no-op stubs.
+ virtual status_t setDataSource(int fd, int64_t offset, int64_t length) {return OK;}
+ virtual status_t setVideoSurface(const sp<ISurface>& surface) {return OK;}
+ virtual status_t prepare() {return OK;}
+ virtual status_t prepareAsync() {return OK;}
+ virtual status_t start() {return OK;}
+ virtual status_t stop() {return OK;}
+ virtual status_t pause() {return OK;}
+ virtual bool isPlaying() {return true;}
+ virtual status_t seekTo(int msec) {return OK;}
+ virtual status_t getCurrentPosition(int *msec) {return OK;}
+ virtual status_t getDuration(int *msec) {return OK;}
+ virtual status_t reset() {return OK;}
+ virtual status_t setLooping(int loop) {return OK;}
+ virtual player_type playerType() {return TEST_PLAYER;}
+ virtual status_t invoke(const Parcel& request, Parcel *reply);
+
+ private:
+ // Take a request, copy it to the reply.
+ void ping(const Parcel& request, Parcel *reply);
+
+ status_t mStatus; // Result of the last invoke() handler.
+ TestType mTest; // Test selected by setDataSource().
+};
+
+// Dispatch an invoke request to the handler for the test selected by
+// setDataSource().  An unselected/unknown test records UNKNOWN_ERROR.
+// Returns the status recorded by the handler in mStatus.
+status_t Player::invoke(const Parcel& request, Parcel *reply)
+{
+ switch (mTest) {
+ case PING:
+ ping(request, reply);
+ break;
+ default: mStatus = UNKNOWN_ERROR;
+ }
+ return mStatus;
+}
+
+// Echo test: copy all remaining bytes of the request parcel into the
+// reply parcel unchanged, then record success in mStatus.
+void Player::ping(const Parcel& request, Parcel *reply)
+{
+ // Number of unread bytes left in the request.
+ const size_t len = request.dataAvail();
+
+ reply->setData(static_cast<const uint8_t*>(request.readInplace(len)), len);
+ mStatus = OK;
+}
+
+}
+
+// Factory entry point resolved via the dynamic loader (see TestPlayerStub):
+// creates a new mock test player.  extern "C" keeps the symbol unmangled.
+extern "C" android::MediaPlayerBase* newPlayer()
+{
+ LOGD("New invoke test player");
+ return new Player();
+}
+
+// Matching destruction entry point: frees a player created by newPlayer().
+extern "C" android::status_t deletePlayer(android::MediaPlayerBase *player)
+{
+ LOGD("Delete invoke test player");
+ delete player;
+ return OK;
+}