Diffstat (limited to 'media')
-rw-r--r--  media/java/android/media/AudioTrack.java                        | 149
-rw-r--r--  media/java/android/media/SoundPool.java                         | 469
-rw-r--r--  media/jni/soundpool/Android.mk                                   |   2
-rw-r--r--  media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp   |  96
            (renamed from media/jni/soundpool/android_media_SoundPool.cpp)
4 files changed, 480 insertions, 236 deletions
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 399eb7b..2a49f85 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -26,7 +26,7 @@ import android.util.Log;
/**
* The AudioTrack class manages and plays a single audio resource for Java applications.
- * It allows streaming PCM audio buffers to the audio hardware for playback. This is
+ * It allows streaming of PCM audio buffers to the audio sink for playback. This is
* achieved by "pushing" the data to the AudioTrack object using one of the
* {@link #write(byte[], int, int)} and {@link #write(short[], int, int)} methods.
*
@@ -53,8 +53,10 @@ import android.util.Log;
* can play before running out of data.<br>
* For an AudioTrack using the static mode, this size is the maximum size of the sound that can
* be played from it.<br>
- * For the streaming mode, data will be written to the hardware in chunks of
+ * For the streaming mode, data will be written to the audio sink in chunks of
* sizes less than or equal to the total buffer size.
+ *
+ * AudioTrack is not final and thus permits subclasses, but such use is not recommended.
*/
public class AudioTrack
{
@@ -130,7 +132,7 @@ public class AudioTrack
private static final int ERROR_NATIVESETUP_NATIVEINITFAILED = -20;
// Events:
- // to keep in sync with frameworks/base/include/media/AudioTrack.h
+ // to keep in sync with frameworks/av/include/media/AudioTrack.h
/**
* Event id denotes when playback head has reached a previously set marker.
*/
@@ -159,9 +161,10 @@ public class AudioTrack
*/
private final Object mPlayStateLock = new Object();
/**
- * Size of the native audio buffer.
+ * Sizes of the native audio buffer.
*/
private int mNativeBufferSizeInBytes = 0;
+ private int mNativeBufferSizeInFrames = 0;
/**
* Handler for marker events coming from the native code.
*/
@@ -171,7 +174,7 @@ public class AudioTrack
*/
private final Looper mInitializationLooper;
/**
- * The audio data sampling rate in Hz.
+ * The audio data source sampling rate in Hz.
*/
private int mSampleRate; // initialized by all constructors
/**
@@ -192,7 +195,7 @@ public class AudioTrack
*/
private int mStreamType = AudioManager.STREAM_MUSIC;
/**
- * The way audio is consumed by the hardware, streaming or static.
+ * The way audio is consumed by the audio sink, streaming or static.
*/
private int mDataLoadMode = MODE_STREAM;
/**
@@ -236,17 +239,20 @@ public class AudioTrack
* {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
* {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
* {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
- * @param sampleRateInHz the sample rate expressed in Hertz.
+ * @param sampleRateInHz the initial source sample rate expressed in Hz.
* @param channelConfig describes the configuration of the audio channels.
* See {@link AudioFormat#CHANNEL_OUT_MONO} and
* {@link AudioFormat#CHANNEL_OUT_STEREO}
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
- * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
- * from for playback. If using the AudioTrack in streaming mode, you can write data into
- * this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
- * this is the maximum size of the sound that will be played for this instance.
+ * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
+ * read from for playback.
+ * If track's creation mode is {@link #MODE_STREAM}, you can write data into
+ * this buffer in chunks less than or equal to this size, and it is typical to use
+ * chunks of 1/2 of the total size to permit double-buffering.
+ * If the track's creation mode is {@link #MODE_STATIC},
+ * this is the maximum length sample, or audio clip, that can be played by this instance.
* See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
* for the successful creation of an AudioTrack instance in streaming mode. Using values
* smaller than getMinBufferSize() will result in an initialization failure.
@@ -257,7 +263,7 @@ public class AudioTrack
int bufferSizeInBytes, int mode)
throws IllegalArgumentException {
this(streamType, sampleRateInHz, channelConfig, audioFormat,
- bufferSizeInBytes, mode, 0);
+ bufferSizeInBytes, mode, 0 /*session*/);
}
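For reference, a minimal sketch of how this constructor is typically used in MODE_STREAM, sized with getMinBufferSize() as the javadoc above recommends; the 44.1 kHz stereo 16-bit PCM values and the 2x headroom factor are illustrative assumptions, not part of this patch:

    // Illustrative values: 44.1 kHz, stereo, 16-bit PCM.
    int minSize = AudioTrack.getMinBufferSize(44100,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            minSize * 2,                      // extra headroom over the minimum
            AudioTrack.MODE_STREAM);
    track.play();
    byte[] chunk = new byte[minSize];         // e.g. half the track buffer, for double-buffering
    // Fill chunk from a decoder or file, then push it; write() blocks in streaming mode.
    track.write(chunk, 0, chunk.length);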
/**
@@ -267,7 +273,7 @@ public class AudioTrack
* is provided when creating an AudioEffect, this effect will be applied only to audio tracks
* and media players in the same session and not to the output mix.
* When an AudioTrack is created without specifying a session, it will create its own session
- * which can be retreived by calling the {@link #getAudioSessionId()} method.
+ * which can be retrieved by calling the {@link #getAudioSessionId()} method.
* If a non-zero session ID is provided, this AudioTrack will share effects attached to this
* session
* with all other media players or audio tracks in the same session, otherwise a new session
@@ -276,7 +282,7 @@ public class AudioTrack
* {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
* {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
* {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
- * @param sampleRateInHz the sample rate expressed in Hertz.
+ * @param sampleRateInHz the initial source sample rate expressed in Hz.
* @param channelConfig describes the configuration of the audio channels.
* See {@link AudioFormat#CHANNEL_OUT_MONO} and
* {@link AudioFormat#CHANNEL_OUT_STEREO}
@@ -464,7 +470,7 @@ public class AudioTrack
}
- // Convenience method for the contructor's audio buffer size check.
+ // Convenience method for the constructor's audio buffer size check.
// preconditions:
// mChannelCount is valid
// mAudioFormat is valid
@@ -480,6 +486,7 @@ public class AudioTrack
}
mNativeBufferSizeInBytes = audioBufferSize;
+ mNativeBufferSizeInFrames = audioBufferSize / frameSizeInBytes;
}
@@ -559,7 +566,6 @@ public class AudioTrack
/**
* Returns the configured channel configuration.
-
* See {@link AudioFormat#CHANNEL_OUT_MONO}
* and {@link AudioFormat#CHANNEL_OUT_STEREO}.
*/
@@ -577,8 +583,7 @@ public class AudioTrack
/**
* Returns the state of the AudioTrack instance. This is useful after the
* AudioTrack instance has been created to check if it was initialized
- * properly. This ensures that the appropriate hardware resources have been
- * acquired.
+ * properly. This ensures that the appropriate resources have been acquired.
* @see #STATE_INITIALIZED
* @see #STATE_NO_STATIC_DATA
* @see #STATE_UNINITIALIZED
@@ -600,14 +605,26 @@ public class AudioTrack
}
/**
- * Returns the native frame count used by the hardware.
+ * Returns the "native frame count", derived from the bufferSizeInBytes specified at
+ * creation time and converted to frame units.
+ * If track's creation mode is {@link #MODE_STATIC},
+ * it is equal to the specified bufferSizeInBytes converted to frame units.
+ * If track's creation mode is {@link #MODE_STREAM},
+ * it is typically greater than or equal to the specified bufferSizeInBytes converted to frame
+ * units; it may be rounded up to a larger value if needed by the target device implementation.
+ * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
+ * See {@link AudioManager#getProperty(String)} for key
+ * {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
*/
+ @Deprecated
protected int getNativeFrameCount() {
return native_get_native_frame_count();
}
/**
* Returns marker position expressed in frames.
+ * @return marker position in wrapping frame units similar to {@link #getPlaybackHeadPosition},
+ * or zero if marker is disabled.
*/
public int getNotificationMarkerPosition() {
return native_get_marker_pos();
@@ -615,20 +632,26 @@ public class AudioTrack
/**
* Returns the notification update period expressed in frames.
+ * Zero means that no position update notifications are being delivered.
*/
public int getPositionNotificationPeriod() {
return native_get_pos_update_period();
}
/**
- * Returns the playback head position expressed in frames
+ * Returns the playback head position expressed in frames.
+ * Though the "int" type is signed 32-bits, the value should be reinterpreted as if it is
+ * unsigned 32-bits. That is, the next position after 0x7FFFFFFF is (int) 0x80000000.
+ * This is a continuously advancing counter. It will wrap (overflow) periodically,
+ * for example approximately once every 27:03:11 hours:minutes:seconds at 44.1 kHz.
+ * It is reset to zero by flush(), reload(), and stop().
*/
public int getPlaybackHeadPosition() {
return native_get_position();
}
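A small sketch of the unsigned reinterpretation described above, assuming "track" is an initialized AudioTrack (the variable name is illustrative):

    // Treat the wrapping signed int as an unsigned 32-bit frame counter.
    long frames = ((long) track.getPlaybackHeadPosition()) & 0xFFFFFFFFL;
    // Elapsed time at the source rate; only meaningful modulo the wrap period
    // (roughly 27 hours at 44.1 kHz, as noted above).
    double seconds = frames / (double) track.getSampleRate();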
/**
- * Returns the hardware output sample rate
+ * Returns the output sample rate in Hz for the specified stream type.
*/
static public int getNativeOutputSampleRate(int streamType) {
return native_get_output_sample_rate(streamType);
@@ -639,7 +662,10 @@ public class AudioTrack
* object to be created in the {@link #MODE_STREAM} mode. Note that this size doesn't
* guarantee a smooth playback under load, and higher values should be chosen according to
* the expected frequency at which the buffer will be refilled with additional data to play.
- * @param sampleRateInHz the sample rate expressed in Hertz.
+ * For example, if you intend to dynamically set the source sample rate of an AudioTrack
+ * to a higher value than the initial source sample rate, be sure to configure the buffer size
+ * based on the highest planned sample rate.
+ * @param sampleRateInHz the source sample rate expressed in Hz.
* @param channelConfig describes the configuration of the audio channels.
* See {@link AudioFormat#CHANNEL_OUT_MONO} and
* {@link AudioFormat#CHANNEL_OUT_STEREO}
@@ -647,8 +673,7 @@ public class AudioTrack
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
* @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
- * or {@link #ERROR} if the implementation was unable to query the hardware for its output
- * properties,
+ * or {@link #ERROR} if unable to query for output properties,
* or the minimum buffer size expressed in bytes.
*/
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
@@ -781,10 +806,13 @@ public class AudioTrack
/**
* Sets the playback sample rate for this track. This sets the sampling rate at which
- * the audio data will be consumed and played back, not the original sampling rate of the
- * content. Setting it to half the sample rate of the content will cause the playback to
- * last twice as long, but will also result in a negative pitch shift.
- * The valid sample rate range is from 1Hz to twice the value returned by
+ * the audio data will be consumed and played back
+ * (as set by the sampleRateInHz parameter in the
+ * {@link #AudioTrack(int, int, int, int, int, int)} constructor),
+ * not the original sampling rate of the
+ * content. For example, setting it to half the sample rate of the content will cause the
+ * playback to last twice as long, but will also result in a pitch shift down by one octave.
+ * The valid sample rate range is from 1 Hz to twice the value returned by
* {@link #getNativeOutputSampleRate(int)}.
* @param sampleRateInHz the sample rate expressed in Hz
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
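The two notes above combine in practice: size the buffer for the highest sample rate you plan to set, then switch rates at run time. A hedged sketch, with the 22.05/44.1 kHz mono figures chosen purely for illustration:

    // Source starts at 22050 Hz, but the buffer is sized for the 44100 Hz rate used later.
    int size = AudioTrack.getMinBufferSize(44100,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack t = new AudioTrack(AudioManager.STREAM_MUSIC, 22050,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            size, AudioTrack.MODE_STREAM);
    t.setPlaybackRate(44100);   // plays in half the time, pitch shifted up one octave
    t.setPlaybackRate(11025);   // half the source rate: twice as long, down one octave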
@@ -802,8 +830,11 @@ public class AudioTrack
/**
- * Sets the position of the notification marker.
- * @param markerInFrames marker in frames
+ * Sets the position of the notification marker. At most one marker can be active.
+ * @param markerInFrames marker position in wrapping frame units similar to
+ * {@link #getPlaybackHeadPosition}, or zero to disable the marker.
+ * To set a marker at a position which would appear as zero due to wraparound,
+ * a workaround is to use a non-zero position near zero, such as -1 or 1.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -833,6 +864,8 @@ public class AudioTrack
* The track must be stopped or paused for the position to be changed,
* and must use the {@link #MODE_STATIC} mode.
* @param positionInFrames playback head position expressed in frames
+ * Zero corresponds to start of buffer.
+ * The position must not be greater than the buffer size in frames, or negative.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -841,18 +874,28 @@ public class AudioTrack
getPlayState() == PLAYSTATE_PLAYING) {
return ERROR_INVALID_OPERATION;
}
+ if (!(0 <= positionInFrames && positionInFrames <= mNativeBufferSizeInFrames)) {
+ return ERROR_BAD_VALUE;
+ }
return native_set_position(positionInFrames);
}
/**
* Sets the loop points and the loop count. The loop can be infinite.
* Similarly to setPlaybackHeadPosition,
- * the track must be stopped or paused for the position to be changed,
+ * the track must be stopped or paused for the loop points to be changed,
* and must use the {@link #MODE_STATIC} mode.
* @param startInFrames loop start marker expressed in frames
+ * Zero corresponds to start of buffer.
+ * The start marker must not be greater than or equal to the buffer size in frames, or negative.
* @param endInFrames loop end marker expressed in frames
+ * The total buffer size in frames corresponds to end of buffer.
+ * The end marker must not be greater than the buffer size in frames.
+ * For looping, the end marker must not be less than or equal to the start marker,
+ * but to disable looping
+ * it is permitted for start marker, end marker, and loop count to all be 0.
* @param loopCount the number of times the loop is looped.
- * A value of -1 means infinite looping.
+ * A value of -1 means infinite looping, and 0 disables looping.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -861,14 +904,23 @@ public class AudioTrack
getPlayState() == PLAYSTATE_PLAYING) {
return ERROR_INVALID_OPERATION;
}
+ if (loopCount == 0) {
+ ; // explicitly allowed as an exception to the loop region range check
+ } else if (!(0 <= startInFrames && startInFrames < mNativeBufferSizeInFrames &&
+ startInFrames < endInFrames && endInFrames <= mNativeBufferSizeInFrames)) {
+ return ERROR_BAD_VALUE;
+ }
return native_set_loop(startInFrames, endInFrames, loopCount);
}
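A short sketch of loop points that satisfy the new range check, assuming a MODE_STATIC track whose buffer holds "frames" frames of already-written audio (both names are illustrative):

    staticTrack.setLoopPoints(0, frames, -1);   // loop the whole clip indefinitely
    staticTrack.setLoopPoints(0, 0, 0);         // all zeros: looping disabled, allowed as the exception
    // Any other combination must satisfy 0 <= start < end <= buffer size in frames,
    // otherwise the check above returns ERROR_BAD_VALUE.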
/**
- * Sets the initialization state of the instance. To be used in an AudioTrack subclass
- * constructor to set a subclass-specific post-initialization state.
+ * Sets the initialization state of the instance. This method was originally intended to be used
+ * in an AudioTrack subclass constructor to set a subclass-specific post-initialization state.
+ * However, subclasses of AudioTrack are no longer recommended, so this method is obsolete.
* @param state the state of the AudioTrack instance
+ * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
*/
+ @Deprecated
protected void setState(int state) {
mState = state;
}
@@ -879,6 +931,7 @@ public class AudioTrack
//--------------------
/**
* Starts playing an AudioTrack.
+ * If track's creation mode is {@link #MODE_STATIC}, you must have called write() prior.
*
* @throws IllegalStateException
*/
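A minimal sketch of the MODE_STATIC ordering this note describes, assuming "pcm" already holds a complete 16-bit mono clip at 44.1 kHz (all values illustrative):

    AudioTrack clip = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            pcm.length, AudioTrack.MODE_STATIC);
    clip.write(pcm, 0, pcm.length);   // load the static buffer before the first play()
    clip.play();
    // Later, to replay the same clip from the start of the buffer:
    clip.stop();
    clip.reloadStaticData();
    clip.play();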
@@ -943,7 +996,8 @@ public class AudioTrack
/**
* Flushes the audio data currently queued for playback. Any data that has
- * not been played back will be discarded.
+ * not been played back will be discarded. No-op if not stopped or paused,
+ * or if the track's creation mode is not {@link #MODE_STREAM}.
*/
public void flush() {
if (mState == STATE_INITIALIZED) {
@@ -954,11 +1008,13 @@ public class AudioTrack
}
/**
- * Writes the audio data to the audio hardware for playback. Will block until
- * all data has been written to the audio mixer.
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * In streaming mode, will block until all data has been written to the audio sink.
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
* Note that the actual playback of this data might occur after this function
* returns. This function is thread safe with respect to {@link #stop} calls,
- * in which case all of the specified data might not be written to the mixer.
+ * in which case all of the specified data might not be written to the audio sink.
*
* @param audioData the array that holds the data to play.
* @param offsetInBytes the offset expressed in bytes in audioData where the data to play
@@ -995,11 +1051,13 @@ public class AudioTrack
/**
- * Writes the audio data to the audio hardware for playback. Will block until
- * all data has been written to the audio mixer.
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * In streaming mode, will block until all data has been written to the audio sink.
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
* Note that the actual playback of this data might occur after this function
* returns. This function is thread safe with respect to {@link #stop} calls,
- * in which case all of the specified data might not be written to the mixer.
+ * in which case all of the specified data might not be written to the audio sink.
*
* @param audioData the array that holds the data to play.
* @param offsetInShorts the offset expressed in shorts in audioData where the data to play
@@ -1037,8 +1095,8 @@ public class AudioTrack
/**
* Notifies the native resource to reuse the audio data already loaded in the native
- * layer. This call is only valid with AudioTrack instances that don't use the streaming
- * model.
+ * layer, that is to rewind to start of buffer.
+ * The track's creation mode must be {@link #MODE_STATIC}.
* @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
@@ -1079,8 +1137,9 @@ public class AudioTrack
/**
* Sets the send level of the audio track to the attached auxiliary effect
- * {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
- * <p>By default the send level is 0, so even if an effect is attached to the player
+ * {@link #attachAuxEffect(int)}. The level value range is 0.0f to 1.0f.
+ * Values are clamped to the (0.0f, 1.0f) interval if outside this range.
+ * <p>By default the send level is 0.0f, so even if an effect is attached to the player
* this method must be called for the effect to be applied.
* <p>Note that the passed level value is a raw scalar. UI controls should be scaled
* logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
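To ground the send-level description above, a hedged sketch that routes a track through an auxiliary reverb; it assumes android.media.audiofx.PresetReverb created on audio session 0 (the output mix), which is the usual setup for auxiliary effects:

    PresetReverb reverb = new PresetReverb(0 /*priority*/, 0 /*output mix session*/);
    reverb.setPreset(PresetReverb.PRESET_LARGEHALL);
    reverb.setEnabled(true);
    track.attachAuxEffect(reverb.getId());
    track.setAuxEffectSendLevel(1.0f);   // raw scalar; out-of-range values are clamped to 0.0f..1.0f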
diff --git a/media/java/android/media/SoundPool.java b/media/java/android/media/SoundPool.java
index 587af47..5127479 100644
--- a/media/java/android/media/SoundPool.java
+++ b/media/java/android/media/SoundPool.java
@@ -16,19 +16,21 @@
package android.media;
-import android.util.AndroidRuntimeException;
-import android.util.Log;
import java.io.File;
import java.io.FileDescriptor;
-import android.os.ParcelFileDescriptor;
+import java.io.IOException;
import java.lang.ref.WeakReference;
+
import android.content.Context;
import android.content.res.AssetFileDescriptor;
-import java.io.IOException;
-
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
+import android.os.ParcelFileDescriptor;
+import android.os.SystemProperties;
+import android.util.AndroidRuntimeException;
+import android.util.Log;
+
/**
* The SoundPool class manages and plays audio resources for applications.
@@ -102,24 +104,8 @@ import android.os.Message;
* another level, a new SoundPool is created, sounds are loaded, and play
* resumes.</p>
*/
-public class SoundPool
-{
- static { System.loadLibrary("soundpool"); }
-
- private final static String TAG = "SoundPool";
- private final static boolean DEBUG = false;
-
- private int mNativeContext; // accessed by native methods
-
- private EventHandler mEventHandler;
- private OnLoadCompleteListener mOnLoadCompleteListener;
-
- private final Object mLock;
-
- // SoundPool messages
- //
- // must match SoundPool.h
- private static final int SAMPLE_LOADED = 1;
+public class SoundPool {
+ private final SoundPoolDelegate mImpl;
/**
* Constructor. Constructs a SoundPool object with the following
@@ -135,12 +121,11 @@ public class SoundPool
* @return a SoundPool object, or null if creation failed
*/
public SoundPool(int maxStreams, int streamType, int srcQuality) {
-
- // do native setup
- if (native_setup(new WeakReference(this), maxStreams, streamType, srcQuality) != 0) {
- throw new RuntimeException("Native setup failed");
+ if (SystemProperties.getBoolean("config.disable_media", false)) {
+ mImpl = new SoundPoolStub();
+ } else {
+ mImpl = new SoundPoolImpl(this, maxStreams, streamType, srcQuality);
}
- mLock = new Object();
}
/**
@@ -151,25 +136,8 @@ public class SoundPool
* a value of 1 for future compatibility.
* @return a sound ID. This value can be used to play or unload the sound.
*/
- public int load(String path, int priority)
- {
- // pass network streams to player
- if (path.startsWith("http:"))
- return _load(path, priority);
-
- // try local path
- int id = 0;
- try {
- File f = new File(path);
- ParcelFileDescriptor fd = ParcelFileDescriptor.open(f, ParcelFileDescriptor.MODE_READ_ONLY);
- if (fd != null) {
- id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
- fd.close();
- }
- } catch (java.io.IOException e) {
- Log.e(TAG, "error loading " + path);
- }
- return id;
+ public int load(String path, int priority) {
+ return mImpl.load(path, priority);
}
/**
@@ -188,17 +156,7 @@ public class SoundPool
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(Context context, int resId, int priority) {
- AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
- int id = 0;
- if (afd != null) {
- id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
- try {
- afd.close();
- } catch (java.io.IOException ex) {
- //Log.d(TAG, "close failed:", ex);
- }
- }
- return id;
+ return mImpl.load(context, resId, priority);
}
/**
@@ -210,15 +168,7 @@ public class SoundPool
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(AssetFileDescriptor afd, int priority) {
- if (afd != null) {
- long len = afd.getLength();
- if (len < 0) {
- throw new AndroidRuntimeException("no length for fd");
- }
- return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
- } else {
- return 0;
- }
+ return mImpl.load(afd, priority);
}
/**
@@ -236,13 +186,9 @@ public class SoundPool
* @return a sound ID. This value can be used to play or unload the sound.
*/
public int load(FileDescriptor fd, long offset, long length, int priority) {
- return _load(fd, offset, length, priority);
+ return mImpl.load(fd, offset, length, priority);
}
- private native final int _load(String uri, int priority);
-
- private native final int _load(FileDescriptor fd, long offset, long length, int priority);
-
/**
* Unload a sound from a sound ID.
*
@@ -253,7 +199,9 @@ public class SoundPool
* @param soundID a soundID returned by the load() function
* @return true if just unloaded, false if previously unloaded
*/
- public native final boolean unload(int soundID);
+ public final boolean unload(int soundID) {
+ return mImpl.unload(soundID);
+ }
/**
* Play a sound from a sound ID.
@@ -279,8 +227,11 @@ public class SoundPool
* @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
* @return non-zero streamID if successful, zero if failed
*/
- public native final int play(int soundID, float leftVolume, float rightVolume,
- int priority, int loop, float rate);
+ public final int play(int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate) {
+ return mImpl.play(
+ soundID, leftVolume, rightVolume, priority, loop, rate);
+ }
/**
* Pause a playback stream.
@@ -293,7 +244,9 @@ public class SoundPool
*
* @param streamID a streamID returned by the play() function
*/
- public native final void pause(int streamID);
+ public final void pause(int streamID) {
+ mImpl.pause(streamID);
+ }
/**
* Resume a playback stream.
@@ -305,7 +258,9 @@ public class SoundPool
*
* @param streamID a streamID returned by the play() function
*/
- public native final void resume(int streamID);
+ public final void resume(int streamID) {
+ mImpl.resume(streamID);
+ }
/**
* Pause all active streams.
@@ -315,7 +270,9 @@ public class SoundPool
* are playing. It also sets a flag so that any streams that
* are playing can be resumed by calling autoResume().
*/
- public native final void autoPause();
+ public final void autoPause() {
+ mImpl.autoPause();
+ }
/**
* Resume all previously active streams.
@@ -323,7 +280,9 @@ public class SoundPool
* Automatically resumes all streams that were paused in previous
* calls to autoPause().
*/
- public native final void autoResume();
+ public final void autoResume() {
+ mImpl.autoResume();
+ }
/**
* Stop a playback stream.
@@ -336,7 +295,9 @@ public class SoundPool
*
* @param streamID a streamID returned by the play() function
*/
- public native final void stop(int streamID);
+ public final void stop(int streamID) {
+ mImpl.stop(streamID);
+ }
/**
* Set stream volume.
@@ -350,8 +311,10 @@ public class SoundPool
* @param leftVolume left volume value (range = 0.0 to 1.0)
* @param rightVolume right volume value (range = 0.0 to 1.0)
*/
- public native final void setVolume(int streamID,
- float leftVolume, float rightVolume);
+ public final void setVolume(int streamID,
+ float leftVolume, float rightVolume) {
+ mImpl.setVolume(streamID, leftVolume, rightVolume);
+ }
/**
* Similar, except set volume of all channels to same value.
@@ -371,7 +334,9 @@ public class SoundPool
*
* @param streamID a streamID returned by the play() function
*/
- public native final void setPriority(int streamID, int priority);
+ public final void setPriority(int streamID, int priority) {
+ mImpl.setPriority(streamID, priority);
+ }
/**
* Set loop mode.
@@ -384,7 +349,9 @@ public class SoundPool
* @param streamID a streamID returned by the play() function
* @param loop loop mode (0 = no loop, -1 = loop forever)
*/
- public native final void setLoop(int streamID, int loop);
+ public final void setLoop(int streamID, int loop) {
+ mImpl.setLoop(streamID, loop);
+ }
/**
* Change playback rate.
@@ -398,14 +365,11 @@ public class SoundPool
* @param streamID a streamID returned by the play() function
* @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
*/
- public native final void setRate(int streamID, float rate);
+ public final void setRate(int streamID, float rate) {
+ mImpl.setRate(streamID, rate);
+ }
- /**
- * Interface definition for a callback to be invoked when all the
- * sounds are loaded.
- */
- public interface OnLoadCompleteListener
- {
+ public interface OnLoadCompleteListener {
/**
* Called when a sound has completed loading.
*
@@ -419,76 +383,297 @@ public class SoundPool
/**
* Sets the callback hook for the OnLoadCompleteListener.
*/
- public void setOnLoadCompleteListener(OnLoadCompleteListener listener)
- {
- synchronized(mLock) {
- if (listener != null) {
- // setup message handler
- Looper looper;
- if ((looper = Looper.myLooper()) != null) {
- mEventHandler = new EventHandler(this, looper);
- } else if ((looper = Looper.getMainLooper()) != null) {
- mEventHandler = new EventHandler(this, looper);
- } else {
- mEventHandler = null;
+ public void setOnLoadCompleteListener(OnLoadCompleteListener listener) {
+ mImpl.setOnLoadCompleteListener(listener);
+ }
+
+ /**
+ * Release the SoundPool resources.
+ *
+ * Release all memory and native resources used by the SoundPool
+ * object. The SoundPool can no longer be used and the reference
+ * should be set to null.
+ */
+ public final void release() {
+ mImpl.release();
+ }
+
+ /**
+ * Interface for SoundPool implementations.
+ * SoundPool is statically referenced and unconditionally called from all
+ * over the framework, so we can't simply omit the class or make it throw
+ * runtime exceptions, as doing so would break the framework. Instead we
+ * now select either a real or no-op impl object based on whether media is
+ * enabled.
+ *
+ * @hide
+ */
+ /* package */ interface SoundPoolDelegate {
+ public int load(String path, int priority);
+ public int load(Context context, int resId, int priority);
+ public int load(AssetFileDescriptor afd, int priority);
+ public int load(
+ FileDescriptor fd, long offset, long length, int priority);
+ public boolean unload(int soundID);
+ public int play(
+ int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate);
+ public void pause(int streamID);
+ public void resume(int streamID);
+ public void autoPause();
+ public void autoResume();
+ public void stop(int streamID);
+ public void setVolume(int streamID, float leftVolume, float rightVolume);
+ public void setVolume(int streamID, float volume);
+ public void setPriority(int streamID, int priority);
+ public void setLoop(int streamID, int loop);
+ public void setRate(int streamID, float rate);
+ public void setOnLoadCompleteListener(OnLoadCompleteListener listener);
+ public void release();
+ }
+
+
+ /**
+ * Real implementation of the delegate interface. This was formerly the
+ * body of SoundPool itself.
+ */
+ /* package */ static class SoundPoolImpl implements SoundPoolDelegate {
+ static { System.loadLibrary("soundpool"); }
+
+ private final static String TAG = "SoundPool";
+ private final static boolean DEBUG = false;
+
+ private int mNativeContext; // accessed by native methods
+
+ private EventHandler mEventHandler;
+ private SoundPool.OnLoadCompleteListener mOnLoadCompleteListener;
+ private SoundPool mProxy;
+
+ private final Object mLock;
+
+ // SoundPool messages
+ //
+ // must match SoundPool.h
+ private static final int SAMPLE_LOADED = 1;
+
+ public SoundPoolImpl(SoundPool proxy, int maxStreams, int streamType, int srcQuality) {
+
+ // do native setup
+ if (native_setup(new WeakReference(this), maxStreams, streamType, srcQuality) != 0) {
+ throw new RuntimeException("Native setup failed");
+ }
+ mLock = new Object();
+ mProxy = proxy;
+ }
+
+ public int load(String path, int priority)
+ {
+ // pass network streams to player
+ if (path.startsWith("http:"))
+ return _load(path, priority);
+
+ // try local path
+ int id = 0;
+ try {
+ File f = new File(path);
+ ParcelFileDescriptor fd = ParcelFileDescriptor.open(f, ParcelFileDescriptor.MODE_READ_ONLY);
+ if (fd != null) {
+ id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
+ fd.close();
}
+ } catch (java.io.IOException e) {
+ Log.e(TAG, "error loading " + path);
+ }
+ return id;
+ }
+
+ public int load(Context context, int resId, int priority) {
+ AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
+ int id = 0;
+ if (afd != null) {
+ id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
+ try {
+ afd.close();
+ } catch (java.io.IOException ex) {
+ //Log.d(TAG, "close failed:", ex);
+ }
+ }
+ return id;
+ }
+
+ public int load(AssetFileDescriptor afd, int priority) {
+ if (afd != null) {
+ long len = afd.getLength();
+ if (len < 0) {
+ throw new AndroidRuntimeException("no length for fd");
+ }
+ return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
} else {
- mEventHandler = null;
+ return 0;
}
- mOnLoadCompleteListener = listener;
}
- }
- private class EventHandler extends Handler
- {
- private SoundPool mSoundPool;
+ public int load(FileDescriptor fd, long offset, long length, int priority) {
+ return _load(fd, offset, length, priority);
+ }
+
+ private native final int _load(String uri, int priority);
+
+ private native final int _load(FileDescriptor fd, long offset, long length, int priority);
+
+ public native final boolean unload(int soundID);
+
+ public native final int play(int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate);
+
+ public native final void pause(int streamID);
+
+ public native final void resume(int streamID);
+
+ public native final void autoPause();
+
+ public native final void autoResume();
+
+ public native final void stop(int streamID);
- public EventHandler(SoundPool soundPool, Looper looper) {
- super(looper);
- mSoundPool = soundPool;
+ public native final void setVolume(int streamID,
+ float leftVolume, float rightVolume);
+
+ public void setVolume(int streamID, float volume) {
+ setVolume(streamID, volume, volume);
}
- @Override
- public void handleMessage(Message msg) {
- switch(msg.what) {
- case SAMPLE_LOADED:
- if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
- synchronized(mLock) {
- if (mOnLoadCompleteListener != null) {
- mOnLoadCompleteListener.onLoadComplete(mSoundPool, msg.arg1, msg.arg2);
+ public native final void setPriority(int streamID, int priority);
+
+ public native final void setLoop(int streamID, int loop);
+
+ public native final void setRate(int streamID, float rate);
+
+ public void setOnLoadCompleteListener(SoundPool.OnLoadCompleteListener listener)
+ {
+ synchronized(mLock) {
+ if (listener != null) {
+ // setup message handler
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mEventHandler = new EventHandler(mProxy, looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mEventHandler = new EventHandler(mProxy, looper);
+ } else {
+ mEventHandler = null;
}
+ } else {
+ mEventHandler = null;
}
- break;
- default:
- Log.e(TAG, "Unknown message type " + msg.what);
- return;
+ mOnLoadCompleteListener = listener;
}
}
- }
- // post event from native code to message handler
- private static void postEventFromNative(Object weakRef, int msg, int arg1, int arg2, Object obj)
- {
- SoundPool soundPool = (SoundPool)((WeakReference)weakRef).get();
- if (soundPool == null)
- return;
+ private class EventHandler extends Handler
+ {
+ private SoundPool mSoundPool;
+
+ public EventHandler(SoundPool soundPool, Looper looper) {
+ super(looper);
+ mSoundPool = soundPool;
+ }
- if (soundPool.mEventHandler != null) {
- Message m = soundPool.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
- soundPool.mEventHandler.sendMessage(m);
+ @Override
+ public void handleMessage(Message msg) {
+ switch(msg.what) {
+ case SAMPLE_LOADED:
+ if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
+ synchronized(mLock) {
+ if (mOnLoadCompleteListener != null) {
+ mOnLoadCompleteListener.onLoadComplete(mSoundPool, msg.arg1, msg.arg2);
+ }
+ }
+ break;
+ default:
+ Log.e(TAG, "Unknown message type " + msg.what);
+ return;
+ }
+ }
}
+
+ // post event from native code to message handler
+ private static void postEventFromNative(Object weakRef, int msg, int arg1, int arg2, Object obj)
+ {
+ SoundPoolImpl soundPoolImpl = (SoundPoolImpl)((WeakReference)weakRef).get();
+ if (soundPoolImpl == null)
+ return;
+
+ if (soundPoolImpl.mEventHandler != null) {
+ Message m = soundPoolImpl.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
+ soundPoolImpl.mEventHandler.sendMessage(m);
+ }
+ }
+
+ public native final void release();
+
+ private native final int native_setup(Object weakRef, int maxStreams, int streamType, int srcQuality);
+
+ protected void finalize() { release(); }
}
/**
- * Release the SoundPool resources.
- *
- * Release all memory and native resources used by the SoundPool
- * object. The SoundPool can no longer be used and the reference
- * should be set to null.
+ * No-op implementation of SoundPool.
+ * Used when media is disabled by the system.
+ * @hide
*/
- public native final void release();
+ /* package */ static class SoundPoolStub implements SoundPoolDelegate {
+ public SoundPoolStub() { }
+
+ public int load(String path, int priority) {
+ return 0;
+ }
+
+ public int load(Context context, int resId, int priority) {
+ return 0;
+ }
+
+ public int load(AssetFileDescriptor afd, int priority) {
+ return 0;
+ }
+
+ public int load(FileDescriptor fd, long offset, long length, int priority) {
+ return 0;
+ }
+
+ public final boolean unload(int soundID) {
+ return true;
+ }
+
+ public final int play(int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate) {
+ return 0;
+ }
+
+ public final void pause(int streamID) { }
+
+ public final void resume(int streamID) { }
+
+ public final void autoPause() { }
+
+ public final void autoResume() { }
- private native final int native_setup(Object weakRef, int maxStreams, int streamType, int srcQuality);
+ public final void stop(int streamID) { }
- protected void finalize() { release(); }
+ public final void setVolume(int streamID,
+ float leftVolume, float rightVolume) { }
+
+ public void setVolume(int streamID, float volume) {
+ }
+
+ public final void setPriority(int streamID, int priority) { }
+
+ public final void setLoop(int streamID, int loop) { }
+
+ public final void setRate(int streamID, float rate) { }
+
+ public void setOnLoadCompleteListener(SoundPool.OnLoadCompleteListener listener) {
+ }
+
+ public final void release() { }
+ }
}
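The public surface is unchanged by this refactoring, so existing client code keeps working; a hedged sketch of the usual load-then-play flow (context, R.raw.click, and the stream counts are placeholders):

    SoundPool pool = new SoundPool(4 /*maxStreams*/, AudioManager.STREAM_MUSIC, 0 /*srcQuality*/);
    pool.setOnLoadCompleteListener(new SoundPool.OnLoadCompleteListener() {
        public void onLoadComplete(SoundPool p, int sampleId, int status) {
            if (status == 0) {   // 0 indicates a successful load
                p.play(sampleId, 1.0f, 1.0f, 1 /*priority*/, 0 /*no loop*/, 1.0f /*rate*/);
            }
        }
    });
    int soundId = pool.load(context, R.raw.click, 1);
    // ... when finished; with config.disable_media set, all of these calls hit the no-op SoundPoolStub:
    pool.release();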
diff --git a/media/jni/soundpool/Android.mk b/media/jni/soundpool/Android.mk
index 5835b9f..ed8d7c1 100644
--- a/media/jni/soundpool/Android.mk
+++ b/media/jni/soundpool/Android.mk
@@ -2,7 +2,7 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- android_media_SoundPool.cpp
+ android_media_SoundPool_SoundPoolImpl.cpp
LOCAL_SHARED_LIBRARIES := \
liblog \
diff --git a/media/jni/soundpool/android_media_SoundPool.cpp b/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp
index 9658856..2604850 100644
--- a/media/jni/soundpool/android_media_SoundPool.cpp
+++ b/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp
@@ -39,9 +39,9 @@ static inline SoundPool* MusterSoundPool(JNIEnv *env, jobject thiz) {
// ----------------------------------------------------------------------------
static int
-android_media_SoundPool_load_URL(JNIEnv *env, jobject thiz, jstring path, jint priority)
+android_media_SoundPool_SoundPoolImpl_load_URL(JNIEnv *env, jobject thiz, jstring path, jint priority)
{
- ALOGV("android_media_SoundPool_load_URL");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_load_URL");
SoundPool *ap = MusterSoundPool(env, thiz);
if (path == NULL) {
jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
@@ -54,10 +54,10 @@ android_media_SoundPool_load_URL(JNIEnv *env, jobject thiz, jstring path, jint p
}
static int
-android_media_SoundPool_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
+android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
jlong offset, jlong length, jint priority)
{
- ALOGV("android_media_SoundPool_load_FD");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_load_FD");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return ap->load(jniGetFDFromFileDescriptor(env, fileDescriptor),
@@ -65,104 +65,104 @@ android_media_SoundPool_load_FD(JNIEnv *env, jobject thiz, jobject fileDescripto
}
static bool
-android_media_SoundPool_unload(JNIEnv *env, jobject thiz, jint sampleID) {
- ALOGV("android_media_SoundPool_unload\n");
+android_media_SoundPool_SoundPoolImpl_unload(JNIEnv *env, jobject thiz, jint sampleID) {
+ ALOGV("android_media_SoundPool_SoundPoolImpl_unload\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return ap->unload(sampleID);
}
static int
-android_media_SoundPool_play(JNIEnv *env, jobject thiz, jint sampleID,
+android_media_SoundPool_SoundPoolImpl_play(JNIEnv *env, jobject thiz, jint sampleID,
jfloat leftVolume, jfloat rightVolume, jint priority, jint loop,
jfloat rate)
{
- ALOGV("android_media_SoundPool_play\n");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_play\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return ap->play(sampleID, leftVolume, rightVolume, priority, loop, rate);
}
static void
-android_media_SoundPool_pause(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_SoundPoolImpl_pause(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_pause");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_pause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->pause(channelID);
}
static void
-android_media_SoundPool_resume(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_SoundPoolImpl_resume(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_resume");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_resume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->resume(channelID);
}
static void
-android_media_SoundPool_autoPause(JNIEnv *env, jobject thiz)
+android_media_SoundPool_SoundPoolImpl_autoPause(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_autoPause");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_autoPause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoPause();
}
static void
-android_media_SoundPool_autoResume(JNIEnv *env, jobject thiz)
+android_media_SoundPool_SoundPoolImpl_autoResume(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_autoResume");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_autoResume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoResume();
}
static void
-android_media_SoundPool_stop(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_SoundPoolImpl_stop(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_stop");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_stop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->stop(channelID);
}
static void
-android_media_SoundPool_setVolume(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_SoundPoolImpl_setVolume(JNIEnv *env, jobject thiz, jint channelID,
float leftVolume, float rightVolume)
{
- ALOGV("android_media_SoundPool_setVolume");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_setVolume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setVolume(channelID, leftVolume, rightVolume);
}
static void
-android_media_SoundPool_setPriority(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_SoundPoolImpl_setPriority(JNIEnv *env, jobject thiz, jint channelID,
int priority)
{
- ALOGV("android_media_SoundPool_setPriority");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_setPriority");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setPriority(channelID, priority);
}
static void
-android_media_SoundPool_setLoop(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_SoundPoolImpl_setLoop(JNIEnv *env, jobject thiz, jint channelID,
int loop)
{
- ALOGV("android_media_SoundPool_setLoop");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_setLoop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setLoop(channelID, loop);
}
static void
-android_media_SoundPool_setRate(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_SoundPoolImpl_setRate(JNIEnv *env, jobject thiz, jint channelID,
float rate)
{
- ALOGV("android_media_SoundPool_setRate");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_setRate");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setRate(channelID, rate);
@@ -176,9 +176,9 @@ static void android_media_callback(SoundPoolEvent event, SoundPool* soundPool, v
}
static jint
-android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef, jint maxChannels, jint streamType, jint srcQuality)
+android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jobject weakRef, jint maxChannels, jint streamType, jint srcQuality)
{
- ALOGV("android_media_SoundPool_native_setup");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_native_setup");
SoundPool *ap = new SoundPool(maxChannels, (audio_stream_type_t) streamType, srcQuality);
if (ap == NULL) {
return -1;
@@ -194,9 +194,9 @@ android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
}
static void
-android_media_SoundPool_release(JNIEnv *env, jobject thiz)
+android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_release");
+ ALOGV("android_media_SoundPool_SoundPoolImpl_release");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap != NULL) {
@@ -219,67 +219,67 @@ android_media_SoundPool_release(JNIEnv *env, jobject thiz)
static JNINativeMethod gMethods[] = {
{ "_load",
"(Ljava/lang/String;I)I",
- (void *)android_media_SoundPool_load_URL
+ (void *)android_media_SoundPool_SoundPoolImpl_load_URL
},
{ "_load",
"(Ljava/io/FileDescriptor;JJI)I",
- (void *)android_media_SoundPool_load_FD
+ (void *)android_media_SoundPool_SoundPoolImpl_load_FD
},
{ "unload",
"(I)Z",
- (void *)android_media_SoundPool_unload
+ (void *)android_media_SoundPool_SoundPoolImpl_unload
},
{ "play",
"(IFFIIF)I",
- (void *)android_media_SoundPool_play
+ (void *)android_media_SoundPool_SoundPoolImpl_play
},
{ "pause",
"(I)V",
- (void *)android_media_SoundPool_pause
+ (void *)android_media_SoundPool_SoundPoolImpl_pause
},
{ "resume",
"(I)V",
- (void *)android_media_SoundPool_resume
+ (void *)android_media_SoundPool_SoundPoolImpl_resume
},
{ "autoPause",
"()V",
- (void *)android_media_SoundPool_autoPause
+ (void *)android_media_SoundPool_SoundPoolImpl_autoPause
},
{ "autoResume",
"()V",
- (void *)android_media_SoundPool_autoResume
+ (void *)android_media_SoundPool_SoundPoolImpl_autoResume
},
{ "stop",
"(I)V",
- (void *)android_media_SoundPool_stop
+ (void *)android_media_SoundPool_SoundPoolImpl_stop
},
{ "setVolume",
"(IFF)V",
- (void *)android_media_SoundPool_setVolume
+ (void *)android_media_SoundPool_SoundPoolImpl_setVolume
},
{ "setPriority",
"(II)V",
- (void *)android_media_SoundPool_setPriority
+ (void *)android_media_SoundPool_SoundPoolImpl_setPriority
},
{ "setLoop",
"(II)V",
- (void *)android_media_SoundPool_setLoop
+ (void *)android_media_SoundPool_SoundPoolImpl_setLoop
},
{ "setRate",
"(IF)V",
- (void *)android_media_SoundPool_setRate
+ (void *)android_media_SoundPool_SoundPoolImpl_setRate
},
{ "native_setup",
"(Ljava/lang/Object;III)I",
- (void*)android_media_SoundPool_native_setup
+ (void*)android_media_SoundPool_SoundPoolImpl_native_setup
},
{ "release",
"()V",
- (void*)android_media_SoundPool_release
+ (void*)android_media_SoundPool_SoundPoolImpl_release
}
};
-static const char* const kClassPathName = "android/media/SoundPool";
+static const char* const kClassPathName = "android/media/SoundPool$SoundPoolImpl";
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
@@ -301,14 +301,14 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
fields.mNativeContext = env->GetFieldID(clazz, "mNativeContext", "I");
if (fields.mNativeContext == NULL) {
- ALOGE("Can't find SoundPool.mNativeContext");
+ ALOGE("Can't find SoundPoolImpl.mNativeContext");
goto bail;
}
fields.mPostEvent = env->GetStaticMethodID(clazz, "postEventFromNative",
"(Ljava/lang/Object;IIILjava/lang/Object;)V");
if (fields.mPostEvent == NULL) {
- ALOGE("Can't find android/media/SoundPool.postEventFromNative");
+ ALOGE("Can't find android/media/SoundPoolImpl.postEventFromNative");
goto bail;
}