summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
authorThe Android Open Source Project <initial-contribution@android.com>2009-03-02 22:54:33 -0800
committerThe Android Open Source Project <initial-contribution@android.com>2009-03-02 22:54:33 -0800
commit3dec7d563a2f3e1eb967ce2054a00b6620e3558c (patch)
treeaa3b0365c47cb3c1607c0dc76c8d32b4046fc287 /media
parent15ab3eae2ec3d73b3e8aa60b33ae41445bf83f4b (diff)
downloadframeworks_base-3dec7d563a2f3e1eb967ce2054a00b6620e3558c.zip
frameworks_base-3dec7d563a2f3e1eb967ce2054a00b6620e3558c.tar.gz
frameworks_base-3dec7d563a2f3e1eb967ce2054a00b6620e3558c.tar.bz2
auto import from //depot/cupcake/@137055
Diffstat (limited to 'media')
-rw-r--r--media/java/android/media/AudioRecord.java38
-rw-r--r--media/java/android/media/AudioTrack.java57
-rw-r--r--media/java/android/media/JetPlayer.java11
-rw-r--r--media/java/android/media/MediaMetadataRetriever.java14
-rw-r--r--media/java/android/media/MediaPlayer.java13
-rw-r--r--media/java/android/media/MediaRecorder.java238
-rw-r--r--media/java/android/media/Ringtone.java13
-rw-r--r--media/java/android/media/SoundPool.java7
-rw-r--r--media/jni/android_media_MediaRecorder.cpp153
-rw-r--r--media/libmedia/AudioRecord.cpp25
-rw-r--r--media/libmedia/AudioTrack.cpp13
-rw-r--r--media/libmedia/IMediaRecorder.cpp22
-rw-r--r--media/libmedia/mediarecorder.cpp75
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.cpp110
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.cpp11
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.h1
-rwxr-xr-xmedia/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java5
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java12
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java755
-rw-r--r--media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java475
20 files changed, 1760 insertions, 288 deletions
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index fd990fe..0ef7760 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -532,12 +532,19 @@ public class AudioRecord
* @param audioData the array to which the recorded audio data is written.
* @param offsetInBytes index in audioData from which the data is written.
* @param sizeInBytes the number of requested bytes.
- * @return the number of bytes that were read or -1 if the object wasn't properly
- * initialized. The number of bytes will not exceed sizeInBytes.
+ * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of bytes will not exceed sizeInBytes.
*/
public int read(byte[] audioData, int offsetInBytes, int sizeInBytes) {
if (mState != STATE_INITIALIZED) {
- return -1;
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
+ || (offsetInBytes + sizeInBytes > audioData.length)) {
+ return ERROR_BAD_VALUE;
}
return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes);
@@ -549,12 +556,19 @@ public class AudioRecord
* @param audioData the array to which the recorded audio data is written.
* @param offsetInShorts index in audioData from which the data is written.
* @param sizeInShorts the number of requested shorts.
- * @return the number of shorts that were read. or -1 if the object wasn't properly
- * initialized. The number of shorts will not exceed sizeInShorts
+ * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of shorts will not exceed sizeInShorts.
*/
public int read(short[] audioData, int offsetInShorts, int sizeInShorts) {
if (mState != STATE_INITIALIZED) {
- return -1;
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
+ || (offsetInShorts + sizeInShorts > audioData.length)) {
+ return ERROR_BAD_VALUE;
}
return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts);
@@ -566,12 +580,18 @@ public class AudioRecord
* is not a direct buffer, this method will always return 0.
* @param audioBuffer the direct buffer to which the recorded audio data is written.
* @param sizeInBytes the number of requested bytes.
- * @return the number of bytes that were read or -1 if the object wasn't properly
- * initialized. The number of bytes will not exceed sizeInBytes.
+ * @return the number of bytes that were read or or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
+ * The number of bytes will not exceed sizeInBytes.
*/
public int read(ByteBuffer audioBuffer, int sizeInBytes) {
if (mState != STATE_INITIALIZED) {
- return -1;
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ( (audioBuffer == null) || (sizeInBytes < 0) ) {
+ return ERROR_BAD_VALUE;
}
return native_read_in_direct_buffer(audioBuffer, sizeInBytes);
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index e32835c..997cd44 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -436,6 +436,15 @@ public class AudioTrack
public int getSampleRate() {
return mSampleRate;
}
+
+ /**
+ * @hide
+ * Returns the current playback rate in Hz. Note that this rate may differ from one set using
+ * {@link #setPlaybackRate(int)} as the value effectively set is implementation-dependent.
+ */
+ public int getPlaybackRate() {
+ return native_get_playback_rate();
+ }
/**
* Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
@@ -523,8 +532,8 @@ public class AudioTrack
/**
* Returns the hardware output sample rate
*/
- static public int getNativeOutputSampleRate() {
- return native_get_output_sample_rate();
+ static public int getNativeOutputSampleRate(int streamType) {
+ return native_get_output_sample_rate(streamType);
}
/**
@@ -650,16 +659,19 @@ public class AudioTrack
* content. Setting it to half the sample rate of the content will cause the playback to
* last twice as long, but will also result result in a negative pitch shift.
* The current implementation supports a maximum sample rate of twice the hardware output
- * sample rate (see {@link #getNativeOutputSampleRate()}). Use {@link #getSampleRate()} to
+ * sample rate (see {@link #getNativeOutputSampleRate(int)}). Use {@link #getSampleRate()} to
* check the rate actually used in hardware after potential clamping.
* @param sampleRateInHz
- * @return error code or success, see {@link #SUCCESS},
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
* {@link #ERROR_INVALID_OPERATION}
*/
public int setPlaybackRate(int sampleRateInHz) {
if (mState != STATE_INITIALIZED) {
return ERROR_INVALID_OPERATION;
}
+ if (sampleRateInHz <= 0) {
+ return ERROR_BAD_VALUE;
+ }
native_set_playback_rate(sampleRateInHz);
return SUCCESS;
}
@@ -699,7 +711,7 @@ public class AudioTrack
*/
public int setPlaybackHeadPosition(int positionInFrames) {
synchronized(mPlayStateLock) {
- if(mPlayState == PLAYSTATE_STOPPED) {
+ if ((mPlayState == PLAYSTATE_STOPPED) || (mPlayState == PLAYSTATE_PAUSED)) {
return native_set_position(positionInFrames);
} else {
return ERROR_INVALID_OPERATION;
@@ -717,6 +729,9 @@ public class AudioTrack
* {@link #ERROR_INVALID_OPERATION}
*/
public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
+ if (mDataLoadMode == MODE_STREAM) {
+ return ERROR_INVALID_OPERATION;
+ }
return native_set_loop(startInFrames, endInFrames, loopCount);
}
@@ -806,8 +821,9 @@ public class AudioTrack
* @param audioData the array that holds the data to play.
* @param offsetInBytes the offset in audioData where the data to play starts.
* @param sizeInBytes the number of bytes to read in audioData after the offset.
- * @return the number of bytes that were written or -1 if the object wasn't properly
- * initialized.
+ * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
*/
public int write(byte[] audioData,int offsetInBytes, int sizeInBytes) {
@@ -816,11 +832,14 @@ public class AudioTrack
&& (sizeInBytes > 0)) {
mState = STATE_INITIALIZED;
}
- //TODO check if future writes should be forbidden for static tracks
- // or: how to update data for static tracks?
if (mState != STATE_INITIALIZED) {
- return -1;
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
+ || (offsetInBytes + sizeInBytes > audioData.length)) {
+ return ERROR_BAD_VALUE;
}
return native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat);
@@ -832,8 +851,9 @@ public class AudioTrack
* @param audioData the array that holds the data to play.
* @param offsetInShorts the offset in audioData where the data to play starts.
* @param sizeInShorts the number of bytes to read in audioData after the offset.
- * @return the number of shorts that were written or -1 if the object wasn't properly
- * initialized.
+ * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
+ * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
+ * the parameters don't resolve to valid data and indexes.
*/
public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
@@ -842,11 +862,14 @@ public class AudioTrack
&& (sizeInShorts > 0)) {
mState = STATE_INITIALIZED;
}
- //TODO check if future writes should be forbidden for static tracks
- // or: how to update data for static tracks?
-
+
if (mState != STATE_INITIALIZED) {
- return -1;
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
+ || (offsetInShorts + sizeInShorts > audioData.length)) {
+ return ERROR_BAD_VALUE;
}
return native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
@@ -1007,7 +1030,7 @@ public class AudioTrack
private native final int native_set_loop(int start, int end, int loopCount);
- static private native final int native_get_output_sample_rate();
+ static private native final int native_get_output_sample_rate(int streamType);
static private native final int native_get_min_buff_size(
int sampleRateInHz, int channelConfig, int audioFormat);
diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java
index bfa2f80..9de0eec 100644
--- a/media/java/android/media/JetPlayer.java
+++ b/media/java/android/media/JetPlayer.java
@@ -25,6 +25,7 @@ import android.content.res.AssetFileDescriptor;
import android.os.Looper;
import android.os.Handler;
import android.os.Message;
+import android.util.AndroidRuntimeException;
import android.util.Log;
/**
@@ -163,8 +164,12 @@ public class JetPlayer
public boolean loadJetFile(AssetFileDescriptor afd) {
+ long len = afd.getLength();
+ if (len < 0) {
+ throw new AndroidRuntimeException("no length for fd");
+ }
return native_loadJetFromFileD(
- afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
+ afd.getFileDescriptor(), afd.getStartOffset(), len);
}
@@ -251,7 +256,9 @@ public class JetPlayer
mJet,
(short)((msg.arg1 & JET_EVENT_SEG_MASK) >> JET_EVENT_SEG_SHIFT),
(byte) ((msg.arg1 & JET_EVENT_TRACK_MASK) >> JET_EVENT_TRACK_SHIFT),
- (byte) ((msg.arg1 & JET_EVENT_CHAN_MASK) >> JET_EVENT_CHAN_SHIFT),
+ // JETCreator channel numbers start at 1, but the index starts at 0
+ // in the .jet files
+ (byte)(((msg.arg1 & JET_EVENT_CHAN_MASK) >> JET_EVENT_CHAN_SHIFT) + 1),
(byte) ((msg.arg1 & JET_EVENT_CTRL_MASK) >> JET_EVENT_CTRL_SHIFT),
(byte) (msg.arg1 & JET_EVENT_VAL_MASK) );
}
diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java
index c1a0c21..3a49a5f 100644
--- a/media/java/android/media/MediaMetadataRetriever.java
+++ b/media/java/android/media/MediaMetadataRetriever.java
@@ -18,6 +18,7 @@ package android.media;
import android.content.ContentResolver;
import android.content.Context;
+import android.content.res.AssetFileDescriptor;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.ParcelFileDescriptor;
@@ -137,11 +138,11 @@ public class MediaMetadataRetriever
return;
}
- ParcelFileDescriptor fd = null;
+ AssetFileDescriptor fd = null;
try {
ContentResolver resolver = context.getContentResolver();
try {
- fd = resolver.openFileDescriptor(uri, "r");
+ fd = resolver.openAssetFileDescriptor(uri, "r");
} catch(FileNotFoundException e) {
throw new IllegalArgumentException();
}
@@ -152,7 +153,14 @@ public class MediaMetadataRetriever
if (!descriptor.valid()) {
throw new IllegalArgumentException();
}
- setDataSource(descriptor);
+ // Note: using getDeclaredLength so that our behavior is the same
+ // as previous versions when the content provider is returning
+ // a full file.
+ if (fd.getDeclaredLength() < 0) {
+ setDataSource(descriptor);
+ } else {
+ setDataSource(descriptor, fd.getStartOffset(), fd.getDeclaredLength());
+ }
return;
} catch (SecurityException ex) {
} finally {
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index 601557d..fe1de8e 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -584,14 +584,21 @@ public class MediaPlayer
return;
}
- ParcelFileDescriptor fd = null;
+ AssetFileDescriptor fd = null;
try {
ContentResolver resolver = context.getContentResolver();
- fd = resolver.openFileDescriptor(uri, "r");
+ fd = resolver.openAssetFileDescriptor(uri, "r");
if (fd == null) {
return;
}
- setDataSource(fd.getFileDescriptor());
+ // Note: using getDeclaredLength so that our behavior is the same
+ // as previous versions when the content provider is returning
+ // a full file.
+ if (fd.getDeclaredLength() < 0) {
+ setDataSource(fd.getFileDescriptor());
+ } else {
+ setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getDeclaredLength());
+ }
return;
} catch (SecurityException ex) {
} catch (IOException ex) {
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index 3609826..4906cbb 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -16,23 +16,27 @@
package android.media;
-import android.view.Surface;
import android.hardware.Camera;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileDescriptor;
-import android.util.Log;
+import java.lang.ref.WeakReference;
/**
* Used to record audio and video. The recording control is based on a
- * simple state machine (see below).
- *
+ * simple state machine (see below).
+ *
* <p><img src="{@docRoot}images/mediarecorder_state_diagram.gif" border="0" />
* </p>
- *
+ *
* <p>A common case of using MediaRecorder to record audio works as follows:
- *
+ *
* <pre>MediaRecorder recorder = new MediaRecorder();
* recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
* recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
@@ -45,39 +49,54 @@ import android.util.Log;
* recorder.reset(); // You can reuse the object by going back to setAudioSource() step
* recorder.release(); // Now the object cannot be reused
* </pre>
- *
+ *
* <p>See the <a href="{@docRoot}guide/topics/media/index.html">Audio and Video</a>
* documentation for additional help with using MediaRecorder.
*/
public class MediaRecorder
-{
+{
static {
System.loadLibrary("media_jni");
}
private final static String TAG = "MediaRecorder";
-
+
// The two fields below are accessed by native methods
@SuppressWarnings("unused")
private int mNativeContext;
-
+
@SuppressWarnings("unused")
private Surface mSurface;
private String mPath;
private FileDescriptor mFd;
+ private EventHandler mEventHandler;
+ private OnErrorListener mOnErrorListener;
/**
* Default constructor.
*/
public MediaRecorder() {
- native_setup();
+
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mEventHandler = new EventHandler(this, looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mEventHandler = new EventHandler(this, looper);
+ } else {
+ mEventHandler = null;
+ }
+
+ /* Native setup requires a weak reference to our object.
+ * It's easier to create it here than in C++.
+ */
+ native_setup(new WeakReference<MediaRecorder>(this));
}
-
+
/**
* Sets a Camera to use for recording. Use this function to switch
* quickly between preview and capture mode without a teardown of
* the camera object. Must call before prepare().
- *
+ *
* @param c the Camera to use for recording
*/
public native void setCamera(Camera c);
@@ -86,15 +105,15 @@ public class MediaRecorder
* Sets a Surface to show a preview of recorded media (video). Calls this
* before prepare() to make sure that the desirable preview display is
* set.
- *
+ *
* @param sv the Surface to use for the preview
*/
public void setPreviewDisplay(Surface sv) {
mSurface = sv;
}
-
+
/**
- * Defines the audio source. These constants are used with
+ * Defines the audio source. These constants are used with
* {@link MediaRecorder#setAudioSource(int)}.
*/
public final class AudioSource {
@@ -108,7 +127,7 @@ public class MediaRecorder
}
/**
- * Defines the video source. These constants are used with
+ * Defines the video source. These constants are used with
* {@link MediaRecorder#setVideoSource(int)}.
*/
public final class VideoSource {
@@ -122,7 +141,7 @@ public class MediaRecorder
}
/**
- * Defines the output format. These constants are used with
+ * Defines the output format. These constants are used with
* {@link MediaRecorder#setOutputFormat(int)}.
*/
public final class OutputFormat {
@@ -140,7 +159,7 @@ public class MediaRecorder
};
/**
- * Defines the audio encoding. These constants are used with
+ * Defines the audio encoding. These constants are used with
* {@link MediaRecorder#setAudioEncoder(int)}.
*/
public final class AudioEncoder {
@@ -155,7 +174,7 @@ public class MediaRecorder
}
/**
- * Defines the video encoding. These constants are used with
+ * Defines the video encoding. These constants are used with
* {@link MediaRecorder#setVideoEncoder(int)}.
*/
public final class VideoEncoder {
@@ -172,50 +191,50 @@ public class MediaRecorder
/**
* Sets the audio source to be used for recording. If this method is not
* called, the output file will not contain an audio track. The source needs
- * to be specified before setting recording-parameters or encoders. Call
+ * to be specified before setting recording-parameters or encoders. Call
* this only before setOutputFormat().
- *
+ *
* @param audio_source the audio source to use
* @throws IllegalStateException if it is called after setOutputFormat()
* @see android.media.MediaRecorder.AudioSource
- */
+ */
public native void setAudioSource(int audio_source)
throws IllegalStateException;
/**
* Sets the video source to be used for recording. If this method is not
* called, the output file will not contain an video track. The source needs
- * to be specified before setting recording-parameters or encoders. Call
+ * to be specified before setting recording-parameters or encoders. Call
* this only before setOutputFormat().
- *
+ *
* @param video_source the video source to use
* @throws IllegalStateException if it is called after setOutputFormat()
* @see android.media.MediaRecorder.VideoSource
- */
+ */
public native void setVideoSource(int video_source)
throws IllegalStateException;
/**
* Sets the format of the output file produced during recording. Call this
* after setAudioSource()/setVideoSource() but before prepare().
- *
- * @param output_format the output format to use. The output format
+ *
+ * @param output_format the output format to use. The output format
* needs to be specified before setting recording-parameters or encoders.
* @throws IllegalStateException if it is called after prepare() or before
* setAudioSource()/setVideoSource().
* @see android.media.MediaRecorder.OutputFormat
- */
+ */
public native void setOutputFormat(int output_format)
throws IllegalStateException;
-
+
/**
* Sets the width and height of the video to be captured. Must be called
* after setVideoSource(). Call this after setOutFormat() but before
* prepare().
- *
+ *
* @param width the width of the video to be captured
* @param height the height of the video to be captured
- * @throws IllegalStateException if it is called after
+ * @throws IllegalStateException if it is called after
* prepare() or before setOutputFormat()
*/
public native void setVideoSize(int width, int height)
@@ -227,7 +246,7 @@ public class MediaRecorder
* prepare().
*
* @param rate the number of frames per second of video to capture
- * @throws IllegalStateException if it is called after
+ * @throws IllegalStateException if it is called after
* prepare() or before setOutputFormat().
*
* NOTE: On some devices that have auto-frame rate, this sets the
@@ -240,12 +259,12 @@ public class MediaRecorder
* Sets the audio encoder to be used for recording. If this method is not
* called, the output file will not contain an audio track. Call this after
* setOutputFormat() but before prepare().
- *
+ *
* @param audio_encoder the audio encoder to use.
* @throws IllegalStateException if it is called before
* setOutputFormat() or after prepare().
* @see android.media.MediaRecorder.AudioEncoder
- */
+ */
public native void setAudioEncoder(int audio_encoder)
throws IllegalStateException;
@@ -253,43 +272,43 @@ public class MediaRecorder
* Sets the video encoder to be used for recording. If this method is not
* called, the output file will not contain an video track. Call this after
* setOutputFormat() and before prepare().
- *
+ *
* @param video_encoder the video encoder to use.
* @throws IllegalStateException if it is called before
* setOutputFormat() or after prepare()
* @see android.media.MediaRecorder.VideoEncoder
- */
+ */
public native void setVideoEncoder(int video_encoder)
throws IllegalStateException;
/**
* Pass in the file descriptor of the file to be written. Call this after
* setOutputFormat() but before prepare().
- *
+ *
* @param fd an open file descriptor to be written into.
* @throws IllegalStateException if it is called before
* setOutputFormat() or after prepare()
- */
+ */
public void setOutputFile(FileDescriptor fd) throws IllegalStateException
{
mPath = null;
mFd = fd;
}
-
+
/**
* Sets the path of the output file to be produced. Call this after
* setOutputFormat() but before prepare().
- *
+ *
* @param path The pathname to use.
* @throws IllegalStateException if it is called before
* setOutputFormat() or after prepare()
- */
+ */
public void setOutputFile(String path) throws IllegalStateException
{
mFd = null;
mPath = path;
}
-
+
// native implementation
private native void _setOutputFile(FileDescriptor fd, long offset, long length)
throws IllegalStateException, IOException;
@@ -299,7 +318,7 @@ public class MediaRecorder
* Prepares the recorder to begin capturing and encoding data. This method
* must be called after setting up the desired audio and video sources,
* encoders, file format, etc., but before start().
- *
+ *
* @throws IllegalStateException if it is called after
* start() or before setOutputFormat().
* @throws IOException if prepare fails otherwise.
@@ -307,8 +326,12 @@ public class MediaRecorder
public void prepare() throws IllegalStateException, IOException
{
if (mPath != null) {
- FileOutputStream f = new FileOutputStream(mPath);
- _setOutputFile(f.getFD(), 0, 0);
+ FileOutputStream fos = new FileOutputStream(mPath);
+ try {
+ _setOutputFile(fos.getFD(), 0, 0);
+ } finally {
+ fos.close();
+ }
} else if (mFd != null) {
_setOutputFile(mFd, 0, 0);
} else {
@@ -318,9 +341,9 @@ public class MediaRecorder
}
/**
- * Begins capturing and encoding data to the file specified with
+ * Begins capturing and encoding data to the file specified with
* setOutputFile(). Call this after prepare().
- *
+ *
* @throws IllegalStateException if it is called before
* prepare().
*/
@@ -329,7 +352,7 @@ public class MediaRecorder
/**
* Stops recording. Call this after start(). Once recording is stopped,
* you will have to configure it again as if it has just been constructed.
- *
+ *
* @throws IllegalStateException if it is called before start()
*/
public native void stop() throws IllegalStateException;
@@ -339,19 +362,118 @@ public class MediaRecorder
* this method, you will have to configure it again as if it had just been
* constructed.
*/
- public native void reset();
-
+ public void reset() {
+ native_reset();
+
+ // make sure none of the listeners get called anymore
+ mEventHandler.removeCallbacksAndMessages(null);
+ }
+
+ private native void native_reset();
+
/**
- * Returns the maximum absolute amplitude that was sampled since the last
+ * Returns the maximum absolute amplitude that was sampled since the last
* call to this method. Call this only after the setAudioSource().
- *
- * @return the maximum absolute amplitude measured since the last call, or
+ *
+ * @return the maximum absolute amplitude measured since the last call, or
* 0 when called for the first time
* @throws IllegalStateException if it is called before
* the audio source has been set.
*/
public native int getMaxAmplitude() throws IllegalStateException;
-
+
+ /* Do not change this value without updating its counterpart
+ * in include/media/mediarecorder.h!
+ */
+ /** Unspecified media recorder error.
+ * @see android.media.MediaRecorder.OnErrorListener
+ */
+ public static final int MEDIA_RECORDER_ERROR_UNKNOWN = 1;
+
+ /**
+ * Interface definition for a callback to be invoked when an error
+ * occurs while recording.
+ */
+ public interface OnErrorListener
+ {
+ /**
+ * Called when an error occurs while recording.
+ *
+ * @param mr the MediaRecorder that encountered the error
+ * @param what the type of error that has occurred:
+ * <ul>
+ * <li>{@link #MEDIA_RECORDER_ERROR_UNKNOWN}
+ * </ul>
+ * @param extra an extra code, specific to the error type
+ */
+ void onError(MediaRecorder mr, int what, int extra);
+ }
+
+ /**
+ * Register a callback to be invoked when an error occurs while
+ * recording.
+ *
+ * @param l the callback that will be run
+ */
+ public void setOnErrorListener(OnErrorListener l)
+ {
+ mOnErrorListener = l;
+ }
+
+ private class EventHandler extends Handler
+ {
+ private MediaRecorder mMediaRecorder;
+
+ public EventHandler(MediaRecorder mr, Looper looper) {
+ super(looper);
+ mMediaRecorder = mr;
+ }
+
+ /* Do not change this value without updating its counterpart
+ * in include/media/mediarecorder.h!
+ */
+ private static final int MEDIA_RECORDER_EVENT_ERROR = 1;
+
+ @Override
+ public void handleMessage(Message msg) {
+ if (mMediaRecorder.mNativeContext == 0) {
+ Log.w(TAG, "mediarecorder went away with unhandled events");
+ return;
+ }
+ switch(msg.what) {
+ case MEDIA_RECORDER_EVENT_ERROR:
+ if (mOnErrorListener != null)
+ mOnErrorListener.onError(mMediaRecorder, msg.arg1, msg.arg2);
+
+ return;
+
+ default:
+ Log.e(TAG, "Unknown message type " + msg.what);
+ return;
+ }
+ }
+ }
+
+ /**
+ * Called from native code when an interesting event happens. This method
+ * just uses the EventHandler system to post the event back to the main app thread.
+ * We use a weak reference to the original MediaRecorder object so that the native
+ * code is safe from the object disappearing from underneath it. (This is
+ * the cookie passed to native_setup().)
+ */
+ private static void postEventFromNative(Object mediarecorder_ref,
+ int what, int arg1, int arg2, Object obj)
+ {
+ MediaRecorder mr = (MediaRecorder)((WeakReference)mediarecorder_ref).get();
+ if (mr == null) {
+ return;
+ }
+
+ if (mr.mEventHandler != null) {
+ Message m = mr.mEventHandler.obtainMessage(what, arg1, arg2, obj);
+ mr.mEventHandler.sendMessage(m);
+ }
+ }
/**
* Releases resources associated with this MediaRecorder object.
@@ -360,10 +482,10 @@ public class MediaRecorder
*/
public native void release();
- private native final void native_setup() throws IllegalStateException;
-
+ private native final void native_setup(Object mediarecorder_this) throws IllegalStateException;
+
private native final void native_finalize();
-
+
@Override
protected void finalize() { native_finalize(); }
}
diff --git a/media/java/android/media/Ringtone.java b/media/java/android/media/Ringtone.java
index cfcb5eb..e80d8aa 100644
--- a/media/java/android/media/Ringtone.java
+++ b/media/java/android/media/Ringtone.java
@@ -164,9 +164,16 @@ public class Ringtone {
} else if (mFileDescriptor != null) {
mAudio.setDataSource(mFileDescriptor);
} else if (mAssetFileDescriptor != null) {
- mAudio.setDataSource(mAssetFileDescriptor.getFileDescriptor(),
- mAssetFileDescriptor.getStartOffset(),
- mAssetFileDescriptor.getLength());
+ // Note: using getDeclaredLength so that our behavior is the same
+ // as previous versions when the content provider is returning
+ // a full file.
+ if (mAssetFileDescriptor.getDeclaredLength() < 0) {
+ mAudio.setDataSource(mAssetFileDescriptor.getFileDescriptor());
+ } else {
+ mAudio.setDataSource(mAssetFileDescriptor.getFileDescriptor(),
+ mAssetFileDescriptor.getStartOffset(),
+ mAssetFileDescriptor.getDeclaredLength());
+ }
} else {
throw new IOException("No data source set.");
}
diff --git a/media/java/android/media/SoundPool.java b/media/java/android/media/SoundPool.java
index 427f173..000430f 100644
--- a/media/java/android/media/SoundPool.java
+++ b/media/java/android/media/SoundPool.java
@@ -16,6 +16,7 @@
package android.media;
+import android.util.AndroidRuntimeException;
import android.util.Log;
import java.io.File;
import java.io.FileDescriptor;
@@ -79,7 +80,11 @@ public class SoundPool
public int load(AssetFileDescriptor afd, int priority) {
if (afd != null) {
- return _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
+ long len = afd.getLength();
+ if (len < 0) {
+ throw new AndroidRuntimeException("no length for fd");
+ }
+ return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
} else {
return 0;
}
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index 095749b..44f875c 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -41,16 +41,68 @@ using namespace android;
// ----------------------------------------------------------------------------
// helper function to extract a native Camera object from a Camera Java object
-extern sp<Camera> get_native_camera(JNIEnv *env, jobject thiz);
+extern sp<Camera> get_native_camera(JNIEnv *env, jobject thiz, struct camera_context_t** context);
struct fields_t {
jfieldID context;
jfieldID surface;
/* actually in android.view.Surface XXX */
jfieldID surface_native;
+
+ jmethodID post_event;
};
static fields_t fields;
+static Mutex sLock;
+
+// ----------------------------------------------------------------------------
+// ref-counted object for callbacks
+class JNIMediaRecorderListener: public MediaRecorderListener
+{
+public:
+ JNIMediaRecorderListener(JNIEnv* env, jobject thiz, jobject weak_thiz);
+ ~JNIMediaRecorderListener();
+ void notify(int msg, int ext1, int ext2);
+private:
+ JNIMediaRecorderListener();
+ jclass mClass; // Reference to MediaRecorder class
+ jobject mObject; // Weak ref to MediaRecorder Java object to call on
+};
+
+JNIMediaRecorderListener::JNIMediaRecorderListener(JNIEnv* env, jobject thiz, jobject weak_thiz)
+{
+
+ // Hold onto the MediaRecorder class for use in calling the static method
+ // that posts events to the application thread.
+ jclass clazz = env->GetObjectClass(thiz);
+ if (clazz == NULL) {
+ LOGE("Can't find android/media/MediaRecorder");
+ jniThrowException(env, "java/lang/Exception", NULL);
+ return;
+ }
+ mClass = (jclass)env->NewGlobalRef(clazz);
+
+ // We use a weak reference so the MediaRecorder object can be garbage collected.
+ // The reference is only used as a proxy for callbacks.
+ mObject = env->NewGlobalRef(weak_thiz);
+}
+
+JNIMediaRecorderListener::~JNIMediaRecorderListener()
+{
+ // remove global references
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->DeleteGlobalRef(mObject);
+ env->DeleteGlobalRef(mClass);
+}
+
+void JNIMediaRecorderListener::notify(int msg, int ext1, int ext2)
+{
+ LOGV("JNIMediaRecorderListener::notify");
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->CallStaticVoidMethod(mClass, fields.post_event, mObject, msg, ext1, ext2, 0);
+}
+
// ----------------------------------------------------------------------------
static sp<Surface> get_surface(JNIEnv* env, jobject clazz)
@@ -74,10 +126,32 @@ static bool process_media_recorder_call(JNIEnv *env, status_t opStatus, const ch
return false;
}
+static sp<MediaRecorder> getMediaRecorder(JNIEnv* env, jobject thiz)
+{
+ Mutex::Autolock l(sLock);
+ MediaRecorder* const p = (MediaRecorder*)env->GetIntField(thiz, fields.context);
+ return sp<MediaRecorder>(p);
+}
+
+static sp<MediaRecorder> setMediaRecorder(JNIEnv* env, jobject thiz, const sp<MediaRecorder>& recorder)
+{
+ Mutex::Autolock l(sLock);
+ sp<MediaRecorder> old = (MediaRecorder*)env->GetIntField(thiz, fields.context);
+ if (recorder.get()) {
+ recorder->incStrong(thiz);
+ }
+ if (old != 0) {
+ old->decStrong(thiz);
+ }
+ env->SetIntField(thiz, fields.context, (int)recorder.get());
+ return old;
+}
+
+
static void android_media_MediaRecorder_setCamera(JNIEnv* env, jobject thiz, jobject camera)
{
- sp<Camera> c = get_native_camera(env, camera);
- MediaRecorder *mr = (MediaRecorder*)env->GetIntField(thiz, fields.context);
+ sp<Camera> c = get_native_camera(env, camera, NULL);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setCamera(c->remote()),
"java/lang/RuntimeException", "setCamera failed.");
}
@@ -90,7 +164,7 @@ android_media_MediaRecorder_setVideoSource(JNIEnv *env, jobject thiz, jint vs)
jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid video source");
return;
}
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setVideoSource(vs), "java/lang/RuntimeException", "setVideoSource failed.");
}
@@ -102,7 +176,7 @@ android_media_MediaRecorder_setAudioSource(JNIEnv *env, jobject thiz, jint as)
jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid audio source");
return;
}
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setAudioSource(as), "java/lang/RuntimeException", "setAudioSource failed.");
}
@@ -114,7 +188,7 @@ android_media_MediaRecorder_setOutputFormat(JNIEnv *env, jobject thiz, jint of)
jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid output format");
return;
}
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setOutputFormat(of), "java/lang/RuntimeException", "setOutputFormat failed.");
}
@@ -126,7 +200,7 @@ android_media_MediaRecorder_setVideoEncoder(JNIEnv *env, jobject thiz, jint ve)
jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid video encoder");
return;
}
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setVideoEncoder(ve), "java/lang/RuntimeException", "setVideoEncoder failed.");
}
@@ -138,7 +212,7 @@ android_media_MediaRecorder_setAudioEncoder(JNIEnv *env, jobject thiz, jint ae)
jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid audio encoder");
return;
}
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setAudioEncoder(ae), "java/lang/RuntimeException", "setAudioEncoder failed.");
}
@@ -151,7 +225,7 @@ android_media_MediaRecorder_setOutputFileFD(JNIEnv *env, jobject thiz, jobject f
return;
}
int fd = getParcelFileDescriptorFD(env, fileDescriptor);
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
status_t opStatus = mr->setOutputFile(fd, offset, length);
process_media_recorder_call(env, opStatus, "java/io/IOException", "setOutputFile failed.");
}
@@ -160,7 +234,7 @@ static void
android_media_MediaRecorder_setVideoSize(JNIEnv *env, jobject thiz, jint width, jint height)
{
LOGV("setVideoSize(%d, %d)", width, height);
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
if (width <= 0 || height <= 0) {
jniThrowException(env, "java/lang/IllegalArgumentException", "invalid video size");
@@ -177,7 +251,7 @@ android_media_MediaRecorder_setVideoFrameRate(JNIEnv *env, jobject thiz, jint ra
jniThrowException(env, "java/lang/IllegalArgumentException", "invalid frame rate");
return;
}
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->setVideoFrameRate(rate), "java/lang/RuntimeException", "setVideoFrameRate failed.");
}
@@ -185,7 +259,7 @@ static void
android_media_MediaRecorder_prepare(JNIEnv *env, jobject thiz)
{
LOGV("prepare");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
jobject surface = env->GetObjectField(thiz, fields.surface);
if (surface != NULL) {
@@ -202,7 +276,7 @@ static int
android_media_MediaRecorder_native_getMaxAmplitude(JNIEnv *env, jobject thiz)
{
LOGV("getMaxAmplitude");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
int result = 0;
process_media_recorder_call(env, mr->getMaxAmplitude(&result), "java/lang/RuntimeException", "getMaxAmplitude failed.");
return result;
@@ -212,7 +286,7 @@ static void
android_media_MediaRecorder_start(JNIEnv *env, jobject thiz)
{
LOGV("start");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->start(), "java/lang/RuntimeException", "start failed.");
}
@@ -220,46 +294,54 @@ static void
android_media_MediaRecorder_stop(JNIEnv *env, jobject thiz)
{
LOGV("stop");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
process_media_recorder_call(env, mr->stop(), "java/lang/RuntimeException", "stop failed.");
}
static void
-android_media_MediaRecorder_reset(JNIEnv *env, jobject thiz)
+android_media_MediaRecorder_native_reset(JNIEnv *env, jobject thiz)
{
- LOGV("reset");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
- process_media_recorder_call(env, mr->reset(), "java/lang/RuntimeException", "reset failed.");
+ LOGV("native_reset");
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
+ process_media_recorder_call(env, mr->reset(), "java/lang/RuntimeException", "native_reset failed.");
}
static void
android_media_MediaRecorder_release(JNIEnv *env, jobject thiz)
{
LOGV("release");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
- env->SetIntField(thiz, fields.context, 0);
- delete mr;
+ sp<MediaRecorder> mr = setMediaRecorder(env, thiz, 0);
+ if (mr != NULL) {
+ mr->setListener(NULL);
+ }
}
static void
-android_media_MediaRecorder_native_setup(JNIEnv *env, jobject thiz)
+android_media_MediaRecorder_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
LOGV("setup");
- MediaRecorder *mr = new MediaRecorder();
- if (mr->initCheck() == NO_ERROR) {
- env->SetIntField(thiz, fields.context, (int)mr);
- } else {
- delete mr;
+ sp<MediaRecorder> mr = new MediaRecorder();
+ if (mr == NULL) {
+ jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
+ return;
+ }
+ if (mr->initCheck() != NO_ERROR) {
jniThrowException(env, "java/lang/IOException", "Unable to initialize camera");
+ return;
}
+
+ // create new listener and give it to MediaRecorder
+ sp<JNIMediaRecorderListener> listener = new JNIMediaRecorderListener(env, thiz, weak_this);
+ mr->setListener(listener);
+
+ setMediaRecorder(env, thiz, mr);
}
static void
android_media_MediaRecorder_native_finalize(JNIEnv *env, jobject thiz)
{
LOGV("finalize");
- MediaRecorder *mr = (MediaRecorder *)env->GetIntField(thiz, fields.context);
- delete mr;
+ android_media_MediaRecorder_release(env, thiz);
}
// ----------------------------------------------------------------------------
@@ -278,9 +360,9 @@ static JNINativeMethod gMethods[] = {
{"getMaxAmplitude", "()I", (void *)android_media_MediaRecorder_native_getMaxAmplitude},
{"start", "()V", (void *)android_media_MediaRecorder_start},
{"stop", "()V", (void *)android_media_MediaRecorder_stop},
- {"reset", "()V", (void *)android_media_MediaRecorder_reset},
+ {"native_reset", "()V", (void *)android_media_MediaRecorder_native_reset},
{"release", "()V", (void *)android_media_MediaRecorder_release},
- {"native_setup", "()V", (void *)android_media_MediaRecorder_native_setup},
+ {"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaRecorder_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaRecorder_native_finalize},
};
@@ -320,6 +402,13 @@ int register_android_media_MediaRecorder(JNIEnv *env)
return -1;
}
+ fields.post_event = env->GetStaticMethodID(clazz, "postEventFromNative",
+ "(Ljava/lang/Object;IIILjava/lang/Object;)V");
+ if (fields.post_event == NULL) {
+ LOGE("Can't find MediaRecorder.postEventFromNative");
+ return -1;
+ }
+
return AndroidRuntime::registerNativeMethods(env,
"android/media/MediaRecorder", gMethods, NELEM(gMethods));
}
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index e833c85..7594ff0 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -128,8 +128,23 @@ status_t AudioRecord::set(
return BAD_VALUE;
}
- // TODO: Get input frame count from hardware.
- int minFrameCount = 1024*2;
+ // validate framecount
+ size_t inputBuffSizeInBytes = -1;
+ if (AudioSystem::getInputBufferSize(sampleRate, format, channelCount, &inputBuffSizeInBytes)
+ != NO_ERROR) {
+ LOGE("AudioSystem could not query the input buffer size.");
+ return NO_INIT;
+ }
+ if (inputBuffSizeInBytes == 0) {
+ LOGE("Recording parameters are not supported: sampleRate %d, channelCount %d, format %d",
+ sampleRate, channelCount, format);
+ return BAD_VALUE;
+ }
+ int frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? 2 : 1);
+
+ // We use 2* size of input buffer for ping pong use of record buffer.
+ int minFrameCount = 2 * inputBuffSizeInBytes / frameSizeInBytes;
+ LOGV("AudioRecord::set() minFrameCount = %d", minFrameCount);
if (frameCount == 0) {
frameCount = minFrameCount;
@@ -144,7 +159,11 @@ status_t AudioRecord::set(
// open record channel
status_t status;
sp<IAudioRecord> record = audioFlinger->openRecord(getpid(), streamType,
- sampleRate, format, channelCount, frameCount, flags, &status);
+ sampleRate, format,
+ channelCount,
+ frameCount,
+ ((uint16_t)flags) << 16,
+ &status);
if (record == 0) {
LOGE("AudioFlinger could not create record track, status: %d", status);
return status;
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index f8520a7..2274521 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -168,6 +168,8 @@ status_t AudioTrack::set(
// Ensure that buffer depth covers at least audio hardware latency
uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
+ if (minBufCount < 2) minBufCount = 2;
+
// When playing from shared buffer, playback will start even if last audioflinger
// block is partly filled.
if (sharedBuffer != 0 && minBufCount > 1) {
@@ -437,8 +439,8 @@ void AudioTrack::setSampleRate(int rate)
return;
}
// Resampler implementation limits input sampling rate to 2 x output sampling rate.
+ if (rate <= 0) rate = 1;
if (rate > afSamplingRate*2) rate = afSamplingRate*2;
-
if (rate > MAX_SAMPLE_RATE) rate = MAX_SAMPLE_RATE;
mCblk->sampleRate = rate;
@@ -466,10 +468,15 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount
if (loopStart >= loopEnd ||
loopEnd - loopStart > mFrameCount) {
- LOGW("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, user %d", loopStart, loopEnd, loopCount, mFrameCount, cblk->user);
+ LOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, user %d", loopStart, loopEnd, loopCount, mFrameCount, cblk->user);
return BAD_VALUE;
}
- // TODO handle shared buffer here: limit loop end to framecount
+
+ if ((mSharedBuffer != 0) && (loopEnd > mFrameCount)) {
+ LOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, framecount %d",
+ loopStart, loopEnd, mFrameCount);
+ return BAD_VALUE;
+ }
cblk->loopStart = loopStart;
cblk->loopEnd = loopEnd;
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 507d03e..f187bf5 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -21,6 +21,7 @@
#include <utils/Parcel.h>
#include <ui/ISurface.h>
#include <ui/ICamera.h>
+#include <media/IMediaPlayerClient.h>
#include <media/IMediaRecorder.h>
namespace android {
@@ -44,7 +45,8 @@ enum {
SET_VIDEO_SIZE,
SET_VIDEO_FRAMERATE,
SET_PREVIEW_SURFACE,
- SET_CAMERA
+ SET_CAMERA,
+ SET_LISTENER
};
class BpMediaRecorder: public BpInterface<IMediaRecorder>
@@ -176,6 +178,16 @@ public:
return reply.readInt32();
}
+ status_t setListener(const sp<IMediaPlayerClient>& listener)
+ {
+ LOGV("setListener(%p)", listener.get());
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ data.writeStrongBinder(listener->asBinder());
+ remote()->transact(SET_LISTENER, data, &reply);
+ return reply.readInt32();
+ }
+
status_t prepare()
{
LOGV("prepare");
@@ -373,6 +385,14 @@ status_t BnMediaRecorder::onTransact(
reply->writeInt32(setVideoFrameRate(frames_per_second));
return NO_ERROR;
} break;
+ case SET_LISTENER: {
+ LOGV("SET_LISTENER");
+ CHECK_INTERFACE(IMediaRecorder, data, reply);
+ sp<IMediaPlayerClient> listener =
+ interface_cast<IMediaPlayerClient>(data.readStrongBinder());
+ reply->writeInt32(setListener(listener));
+ return NO_ERROR;
+ } break;
case SET_PREVIEW_SURFACE: {
LOGV("SET_PREVIEW_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 4ab26ac..98aac39 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -58,6 +58,10 @@ status_t MediaRecorder::setPreviewSurface(const sp<Surface>& surface)
LOGE("setPreviewSurface called in an invalid state(%d)", mCurrentState);
return INVALID_OPERATION;
}
+ if (!mIsVideoSourceSet) {
+ LOGE("try to set preview surface without setting the video source first");
+ return INVALID_OPERATION;
+ }
status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface());
if (OK != ret) {
@@ -86,6 +90,14 @@ status_t MediaRecorder::init()
mCurrentState = MEDIA_RECORDER_ERROR;
return UNKNOWN_ERROR;
}
+
+ ret = mMediaRecorder->setListener(this);
+ if (OK != ret) {
+ LOGV("setListener failed: %d", ret);
+ mCurrentState = MEDIA_RECORDER_ERROR;
+ return UNKNOWN_ERROR;
+ }
+
mCurrentState = MEDIA_RECORDER_INITIALIZED;
return ret;
}
@@ -167,6 +179,10 @@ status_t MediaRecorder::setOutputFormat(int of)
LOGE("setOutputFormat called in an invalid state: %d", mCurrentState);
return INVALID_OPERATION;
}
+ if (mIsVideoSourceSet && of >= OUTPUT_FORMAT_RAW_AMR) {
+ LOGE("output format (%d) is meant for audio recording only and incompatible with video recording", of);
+ return INVALID_OPERATION;
+ }
status_t ret = mMediaRecorder->setOutputFormat(of);
if (OK != ret) {
@@ -185,6 +201,10 @@ status_t MediaRecorder::setVideoEncoder(int ve)
LOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
+ if (!mIsVideoSourceSet) {
+ LOGE("try to set the video encoder without setting the video source first");
+ return INVALID_OPERATION;
+ }
if (mIsVideoEncoderSet) {
LOGE("video encoder has already been set");
return INVALID_OPERATION;
@@ -211,6 +231,10 @@ status_t MediaRecorder::setAudioEncoder(int ae)
LOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
+ if (!mIsAudioSourceSet) {
+ LOGE("try to set the audio encoder without setting the audio source first");
+ return INVALID_OPERATION;
+ }
if (mIsAudioEncoderSet) {
LOGE("audio encoder has already been set");
return INVALID_OPERATION;
@@ -293,6 +317,10 @@ status_t MediaRecorder::setVideoSize(int width, int height)
LOGE("setVideoSize called in an invalid state: %d", mCurrentState);
return INVALID_OPERATION;
}
+ if (!mIsVideoSourceSet) {
+ LOGE("try to set video size without setting video source first");
+ return INVALID_OPERATION;
+ }
status_t ret = mMediaRecorder->setVideoSize(width, height);
if (OK != ret) {
@@ -314,6 +342,10 @@ status_t MediaRecorder::setVideoFrameRate(int frames_per_second)
LOGE("setVideoFrameRate called in an invalid state: %d", mCurrentState);
return INVALID_OPERATION;
}
+ if (!mIsVideoSourceSet) {
+ LOGE("try to set video frame rate without setting video source first");
+ return INVALID_OPERATION;
+ }
status_t ret = mMediaRecorder->setVideoFrameRate(frames_per_second);
if (OK != ret) {
@@ -335,6 +367,23 @@ status_t MediaRecorder::prepare()
LOGE("prepare called in an invalid state: %d", mCurrentState);
return INVALID_OPERATION;
}
+ if (mIsAudioSourceSet != mIsAudioEncoderSet) {
+ if (mIsAudioSourceSet) {
+ LOGE("audio source is set, but audio encoder is not set");
+ } else { // must not happen, since setAudioEncoder checks this already
+ LOGE("audio encoder is set, but audio source is not set");
+ }
+ return INVALID_OPERATION;
+ }
+
+ if (mIsVideoSourceSet != mIsVideoEncoderSet) {
+ if (mIsVideoSourceSet) {
+ LOGE("video source is set, but video encoder is not set");
+ } else { // must not happen, since setVideoEncoder checks this already
+ LOGE("video encoder is set, but video source is not set");
+ }
+ return INVALID_OPERATION;
+ }
status_t ret = mMediaRecorder->prepare();
if (OK != ret) {
@@ -538,5 +587,31 @@ MediaRecorder::~MediaRecorder()
}
}
+status_t MediaRecorder::setListener(const sp<MediaRecorderListener>& listener)
+{
+ LOGV("setListener");
+ Mutex::Autolock _l(mLock);
+ mListener = listener;
+
+ return NO_ERROR;
+}
+
+void MediaRecorder::notify(int msg, int ext1, int ext2)
+{
+ LOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
+
+ sp<MediaRecorderListener> listener;
+ mLock.lock();
+ listener = mListener;
+ mLock.unlock();
+
+ if (listener != NULL) {
+ Mutex::Autolock _l(mNotifyLock);
+ LOGV("callback application");
+ listener->notify(msg, ext1, ext2);
+ LOGV("back from callback");
+ }
+}
+
}; // namespace android
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 97e3536..40705c6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -218,6 +218,104 @@ static int myTid() {
#endif
}
+#if defined(__arm__)
+extern "C" void get_malloc_leak_info(uint8_t** info, size_t* overallSize,
+ size_t* infoSize, size_t* totalMemory, size_t* backtraceSize);
+extern "C" void free_malloc_leak_info(uint8_t* info);
+
+void memStatus(int fd, const Vector<String16>& args)
+{
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+
+ typedef struct {
+ size_t size;
+ size_t dups;
+ intptr_t * backtrace;
+ } AllocEntry;
+
+ uint8_t *info = NULL;
+ size_t overallSize = 0;
+ size_t infoSize = 0;
+ size_t totalMemory = 0;
+ size_t backtraceSize = 0;
+
+ get_malloc_leak_info(&info, &overallSize, &infoSize, &totalMemory, &backtraceSize);
+ if (info) {
+ uint8_t *ptr = info;
+ size_t count = overallSize / infoSize;
+
+ snprintf(buffer, SIZE, " Allocation count %i\n", count);
+ result.append(buffer);
+
+ AllocEntry * entries = new AllocEntry[count];
+
+ for (size_t i = 0; i < count; i++) {
+ // Each entry should be size_t, size_t, intptr_t[backtraceSize]
+ AllocEntry *e = &entries[i];
+
+ e->size = *reinterpret_cast<size_t *>(ptr);
+ ptr += sizeof(size_t);
+
+ e->dups = *reinterpret_cast<size_t *>(ptr);
+ ptr += sizeof(size_t);
+
+ e->backtrace = reinterpret_cast<intptr_t *>(ptr);
+ ptr += sizeof(intptr_t) * backtraceSize;
+ }
+
+ // Now we need to sort the entries. They come sorted by size but
+ // not by stack trace which causes problems using diff.
+ bool moved;
+ do {
+ moved = false;
+ for (size_t i = 0; i < (count - 1); i++) {
+ AllocEntry *e1 = &entries[i];
+ AllocEntry *e2 = &entries[i+1];
+
+ bool swap = e1->size < e2->size;
+ if (e1->size == e2->size) {
+ for(size_t j = 0; j < backtraceSize; j++) {
+ if (e1->backtrace[j] == e2->backtrace[j]) {
+ continue;
+ }
+ swap = e1->backtrace[j] < e2->backtrace[j];
+ break;
+ }
+ }
+ if (swap) {
+ AllocEntry t = entries[i];
+ entries[i] = entries[i+1];
+ entries[i+1] = t;
+ moved = true;
+ }
+ }
+ } while (moved);
+
+ for (size_t i = 0; i < count; i++) {
+ AllocEntry *e = &entries[i];
+
+ snprintf(buffer, SIZE, "size %8i, dup %4i", e->size, e->dups);
+ result.append(buffer);
+ for (size_t ct = 0; (ct < backtraceSize) && e->backtrace[ct]; ct++) {
+ if (ct) {
+ result.append(", ");
+ }
+ snprintf(buffer, SIZE, "0x%08x", e->backtrace[ct]);
+ result.append(buffer);
+ }
+ result.append("\n");
+ }
+
+ delete[] entries;
+ free_malloc_leak_info(info);
+ }
+
+ write(fd, result.string(), result.size());
+}
+#endif
+
status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
@@ -300,6 +398,18 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)
result.append(buffer);
result.append("\n");
}
+
+#if defined(__arm__)
+ bool dumpMem = false;
+ for (size_t i = 0; i < args.size(); i++) {
+ if (args[i] == String16("-m")) {
+ dumpMem = true;
+ }
+ }
+ if (dumpMem) {
+ memStatus(fd, args);
+ }
+#endif
}
write(fd, result.string(), result.size());
return NO_ERROR;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index e8ba17f..4b45acb 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -258,5 +258,16 @@ MediaRecorderClient::~MediaRecorderClient()
release();
}
+status_t MediaRecorderClient::setListener(const sp<IMediaPlayerClient>& listener)
+{
+ LOGV("setListener");
+ Mutex::Autolock lock(mLock);
+ if (mRecorder == NULL) {
+ LOGE("recorder is not initialized");
+ return NO_INIT;
+ }
+ return mRecorder->setListener(listener);
+}
+
}; // namespace android
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 2b80c10..93fd802 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -39,6 +39,7 @@ public:
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
+ virtual status_t setListener(const sp<IMediaPlayerClient>& listener);
virtual status_t prepare();
virtual status_t getMaxAmplitude(int* max);
virtual status_t start();
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java
index 453a165..73688cc 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java
@@ -47,9 +47,9 @@ public class MediaFrameworkTestRunner extends InstrumentationTestRunner {
suite.addTestSuite(MediaPlayerApiTest.class);
suite.addTestSuite(SimTonesTest.class);
suite.addTestSuite(MediaMetadataTest.class);
- // suite.addTestSuite(CameraTest.class);
+ suite.addTestSuite(CameraTest.class);
suite.addTestSuite(MediaRecorderTest.class);
- suite.addTestSuite(MediaAudioTrackTest.class);
+ //suite.addTestSuite(MediaAudioTrackTest.class);
return suite;
}
@@ -59,3 +59,4 @@ public class MediaFrameworkTestRunner extends InstrumentationTestRunner {
}
}
+
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java
index 5981a13..59803f7 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java
@@ -136,11 +136,13 @@ public class CameraTest extends ActivityInstrumentationTestCase<MediaFrameworkTe
//Implement the RawPictureCallback
private final class RawPictureCallback implements PictureCallback {
public void onPictureTaken(byte [] rawData, Camera camera) {
- if (rawData != null) {
- rawPictureCallbackResult = true;
- } else {
- rawPictureCallbackResult = false;
- }
+ // no support for raw data - success if we get the callback
+ rawPictureCallbackResult = true;
+ //if (rawData != null) {
+ // rawPictureCallbackResult = true;
+ //} else {
+ // rawPictureCallbackResult = false;
+ //}
Log.v(TAG, "RawPictureCallback callback");
}
};
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
index b6a0848..24edb65 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
@@ -34,7 +34,7 @@ import android.test.suitebuilder.annotation.Suppress;
*/
public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaFrameworkTest> {
- private String TAG = "MediaAudioTrack";
+ private String TAG = "MediaAudioTrackTest";
public MediaAudioTrackTest() {
super("com.android.mediaframeworktest", MediaFrameworkTest.class);
@@ -50,6 +50,18 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
super.tearDown();
}
+ private static void assumeTrue(String message, boolean cond) {
+ assertTrue("(assume)"+message, cond);
+ }
+
+ private void log(String testName, String message) {
+ Log.v(TAG, "["+testName+"] "+message);
+ }
+
+ private void loge(String testName, String message) {
+ Log.e(TAG, "["+testName+"] "+message);
+ }
+
//-----------------------------------------------------------------
// private class to hold test reslts
public class TestResults {
@@ -62,7 +74,8 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
// generic test methods
public TestResults constructorTestMultiSampleRate(
// parameters tested by this method
- int _inTest_streamType, int _inTest_mode, int _inTest_config,
+ int _inTest_streamType, int _inTest_mode,
+ int _inTest_config, int _inTest_format,
// parameter-dependent expected results
int _expected_stateForMode) {
@@ -70,7 +83,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
String failedRates = "Failure for rate(s): ";
boolean localRes, finalRes = true;
- for(int i = 0 ; i < testSampleRates.length ; i++) {
+ for (int i = 0 ; i < testSampleRates.length ; i++) {
//Log.v("MediaAudioTrackTest", "[ constructorTestMultiSampleRate ] testing "+ testSampleRates[i]);
AudioTrack track = null;
try {
@@ -78,15 +91,16 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
_inTest_streamType,
testSampleRates[i],
_inTest_config,
- AudioFormat.ENCODING_PCM_16BIT,
+ _inTest_format,
AudioTrack.getMinBufferSize(testSampleRates[i],
- _inTest_config, AudioFormat.ENCODING_PCM_16BIT),//testSampleRates[i]*4
+ _inTest_config, _inTest_format),
_inTest_mode);
} catch(IllegalArgumentException iae) {
Log.e("MediaAudioTrackTest", "[ constructorTestMultiSampleRate ] exception at SR "
+ testSampleRates[i]+": \n" + iae);
+ localRes = false;
}
- if(track != null) {
+ if (track != null) {
localRes = (track.getState() == _expected_stateForMode);
track.release();
}
@@ -98,11 +112,11 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
//log the error for the test runner
failedRates += Integer.toString(testSampleRates[i]) + "Hz ";
//log the error for logcat
- Log.e("MediaAudioTrackTest", "[ constructorTestMultiSampleRate ] failed to construct "
+ log("constructorTestMultiSampleRate", "failed to construct "
+"AudioTrack(streamType="+_inTest_streamType
+", sampleRateInHz=" + testSampleRates[i]
+", channelConfig=" + _inTest_config
- +", audioFormat=AudioFormat.ENCODING_PCM_16BIT"
+ +", audioFormat=" + _inTest_format
+", bufferSizeInBytes=" + AudioTrack.getMinBufferSize(testSampleRates[i],
_inTest_config, AudioFormat.ENCODING_PCM_16BIT)
+", mode="+ _inTest_mode );
@@ -118,16 +132,16 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
//----------------------------------
//-----------------------------------------------------------------
- // AudioTrack constructor and AudioTrack.getMinBufferSize(...)
+ // AudioTrack constructor and AudioTrack.getMinBufferSize(...) for 16bit PCM
//----------------------------------
//Test case 1: constructor for streaming AudioTrack, mono, 16bit at misc valid sample rates
- @MediumTest
+ @LargeTest
public void testConstructorMono16MusicStream() throws Exception {
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
- AudioFormat.CHANNEL_CONFIGURATION_MONO,
+ AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_INITIALIZED);
assertTrue("testConstructorMono16MusicStream: " + res.mResultLog, res.mResult);
@@ -135,12 +149,12 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
//Test case 2: constructor for streaming AudioTrack, stereo, 16bit at misc valid sample rates
- @MediumTest
+ @LargeTest
public void testConstructorStereo16MusicStream() throws Exception {
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
- AudioFormat.CHANNEL_CONFIGURATION_STEREO,
+ AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_INITIALIZED);
assertTrue("testConstructorStereo16MusicStream: " + res.mResultLog, res.mResult);
@@ -148,12 +162,12 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
//Test case 3: constructor for static AudioTrack, mono, 16bit at misc valid sample rates
- @MediumTest
+ @LargeTest
public void testConstructorMono16MusicStatic() throws Exception {
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
- AudioFormat.CHANNEL_CONFIGURATION_MONO,
+ AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_NO_STATIC_DATA);
assertTrue("testConstructorMono16MusicStatic: " + res.mResultLog, res.mResult);
@@ -161,16 +175,723 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF
//Test case 4: constructor for static AudioTrack, stereo, 16bit at misc valid sample rates
- @MediumTest
+ @LargeTest
public void testConstructorStereo16MusicStatic() throws Exception {
TestResults res = constructorTestMultiSampleRate(
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
- AudioFormat.CHANNEL_CONFIGURATION_STEREO,
+ AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.STATE_NO_STATIC_DATA);
assertTrue("testConstructorStereo16MusicStatic: " + res.mResultLog, res.mResult);
}
+
+
+ //-----------------------------------------------------------------
+ // AudioTrack constructor and AudioTrack.getMinBufferSize(...) for 8bit PCM
+ //----------------------------------
+
+ //Test case 1: constructor for streaming AudioTrack, mono, 8bit at misc valid sample rates
+ @LargeTest
+ public void testConstructorMono8MusicStream() throws Exception {
+
+ TestResults res = constructorTestMultiSampleRate(
+ AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
+ AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioTrack.STATE_INITIALIZED);
+
+ assertTrue("testConstructorMono8MusicStream: " + res.mResultLog, res.mResult);
+ }
+
+ //Test case 2: constructor for streaming AudioTrack, stereo, 8bit at misc valid sample rates
+ @LargeTest
+ public void testConstructorStereo8MusicStream() throws Exception {
+
+ TestResults res = constructorTestMultiSampleRate(
+ AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM,
+ AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioTrack.STATE_INITIALIZED);
+
+ assertTrue("testConstructorStereo8MusicStream: " + res.mResultLog, res.mResult);
+ }
+
+ //Test case 3: constructor for static AudioTrack, mono, 8bit at misc valid sample rates
+ @LargeTest
+ public void testConstructorMono8MusicStatic() throws Exception {
+
+ TestResults res = constructorTestMultiSampleRate(
+ AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
+ AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioTrack.STATE_NO_STATIC_DATA);
+
+ assertTrue("testConstructorMono8MusicStatic: " + res.mResultLog, res.mResult);
+ }
+
+ //Test case 4: constructor for static AudioTrack, stereo, 8bit at misc valid sample rates
+ @LargeTest
+ public void testConstructorStereo8MusicStatic() throws Exception {
+
+ TestResults res = constructorTestMultiSampleRate(
+ AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC,
+ AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_8BIT,
+ AudioTrack.STATE_NO_STATIC_DATA);
+
+ assertTrue("testConstructorStereo8MusicStatic: " + res.mResultLog, res.mResult);
+ }
+
+
+ //-----------------------------------------------------------------
+ // AudioTrack constructor for all stream types
+ //----------------------------------
+
+ //Test case 1: constructor for all stream types
+ @LargeTest
+ public void testConstructorStreamType() throws Exception {
+ // constants for test
+ final int TYPE_TEST_SR = 22050;
+ final int TYPE_TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TYPE_TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TYPE_TEST_MODE = AudioTrack.MODE_STREAM;
+ final int[] STREAM_TYPES = { AudioManager.STREAM_ALARM, AudioManager.STREAM_BLUETOOTH_SCO,
+ AudioManager.STREAM_MUSIC, AudioManager.STREAM_NOTIFICATION,
+ AudioManager.STREAM_RING, AudioManager.STREAM_SYSTEM,
+ AudioManager.STREAM_VOICE_CALL };
+ final String[] STREAM_NAMES = { "STREAM_ALARM", "STREAM_BLUETOOTH_SCO", "STREAM_MUSIC",
+ "STREAM_NOTIFICATION", "STREAM_RING", "STREAM_SYSTEM", "STREAM_VOICE_CALL" };
+
+ boolean localTestRes = true;
+ AudioTrack track = null;
+ // test: loop constructor on all stream types
+ for (int i = 0 ; i < STREAM_TYPES.length ; i++)
+ {
+ try {
+ //-------- initialization --------------
+ track = new AudioTrack(STREAM_TYPES[i],
+ TYPE_TEST_SR, TYPE_TEST_CONF, TYPE_TEST_FORMAT,
+ AudioTrack.getMinBufferSize(TYPE_TEST_SR, TYPE_TEST_CONF, TYPE_TEST_FORMAT),
+ TYPE_TEST_MODE);
+ } catch (IllegalArgumentException iae) {
+ loge("testConstructorStreamType", "exception for stream type "
+ + STREAM_NAMES[i] + ": "+ iae);
+ localTestRes = false;
+ }
+ //-------- test --------------
+ if (track != null) {
+ if (track.getState() != AudioTrack.STATE_INITIALIZED) {
+ localTestRes = false;
+ Log.e("MediaAudioTrackTest",
+ "[ testConstructorStreamType ] failed for stream type "+STREAM_NAMES[i]);
+ }
+ //-------- tear down --------------
+ track.release();
+ }
+ else {
+ localTestRes = false;
+ }
+ }
+ assertTrue("testConstructorStreamType", localTestRes);
+ }
+
+
+ //-----------------------------------------------------------------
+ // Playback head position
+ //----------------------------------
+
+ //Test case 1: getPlaybackHeadPosition() at 0 after initialization
+ @LargeTest
+ public void testPlaybackHeadPositionAfterInit() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testPlaybackHeadPositionAfterInit";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT), TEST_MODE);
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME, track.getPlaybackHeadPosition() == 0);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 2: getPlaybackHeadPosition() increases after play()
+ @LargeTest
+ public void testPlaybackHeadPositionIncrease() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testPlaybackHeadPositionIncrease";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ Thread.sleep(100);
+ log(TEST_NAME, "position ="+ track.getPlaybackHeadPosition());
+ assertTrue(TEST_NAME, track.getPlaybackHeadPosition() > 0);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 3: getPlaybackHeadPosition() is 0 after flush();
+ @LargeTest
+ public void testPlaybackHeadPositionAfterFlush() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testPlaybackHeadPositionAfterFlush";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ Thread.sleep(100);
+ track.stop();
+ track.flush();
+ log(TEST_NAME, "position ="+ track.getPlaybackHeadPosition());
+ assertTrue(TEST_NAME, track.getPlaybackHeadPosition() == 0);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 3: getPlaybackHeadPosition() is 0 after stop();
+ @LargeTest
+ public void testPlaybackHeadPositionAfterStop() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testPlaybackHeadPositionAfterStop";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ Thread.sleep(100);
+ track.stop();
+ Thread.sleep(100); // TODO: what is a sensible value?
+ int pos = track.getPlaybackHeadPosition();
+ log(TEST_NAME, "position ="+ pos);
+ assertTrue(TEST_NAME, pos == 0);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 4: getPlaybackHeadPosition() is > 0 after play(); pause();
+ @LargeTest
+ public void testPlaybackHeadPositionAfterPause() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testPlaybackHeadPositionAfterPause";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ Thread.sleep(100);
+ track.pause();
+ int pos = track.getPlaybackHeadPosition();
+ log(TEST_NAME, "position ="+ pos);
+ assertTrue(TEST_NAME, pos > 0);
+ //-------- tear down --------------
+ track.release();
+ }
+
+
+ //-----------------------------------------------------------------
+ // Playback properties
+ //----------------------------------
+
+ //Test case 1: setStereoVolume() with max volume returns SUCCESS
+ @LargeTest
+ public void testSetStereoVolumeMax() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetStereoVolumeMax";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ float maxVol = AudioTrack.getMaxVolume();
+ assertTrue(TEST_NAME, track.setStereoVolume(maxVol, maxVol) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 2: setStereoVolume() with min volume returns SUCCESS
+ @LargeTest
+ public void testSetStereoVolumeMin() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetStereoVolumeMin";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ float minVol = AudioTrack.getMinVolume();
+ assertTrue(TEST_NAME, track.setStereoVolume(minVol, minVol) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 3: setStereoVolume() with mid volume returns SUCCESS
+ @LargeTest
+ public void testSetStereoVolumeMid() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetStereoVolumeMid";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ float midVol = (AudioTrack.getMaxVolume() - AudioTrack.getMinVolume()) / 2;
+ assertTrue(TEST_NAME, track.setStereoVolume(midVol, midVol) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 4: setPlaybackRate() with half the content rate returns SUCCESS
+ @LargeTest
+ public void testSetPlaybackRate() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackRate";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.play();
+ assertTrue(TEST_NAME, track.setPlaybackRate((int)(TEST_SR/2)) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 5: setPlaybackRate(0) returns bad value error
+ @LargeTest
+ public void testSetPlaybackRateZero() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackRateZero";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME, track.setPlaybackRate(0) == AudioTrack.ERROR_BAD_VALUE);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 6: setPlaybackRate() accepts values twice the output sample rate
+ @LargeTest
+ public void testSetPlaybackRateTwiceOutputSR() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackRateTwiceOutputSR";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ int outputSR = AudioTrack.getNativeOutputSampleRate(TEST_STREAM_TYPE);
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.play();
+ assertTrue(TEST_NAME, track.setPlaybackRate(2*outputSR) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+/*
+ //Test case 7: setPlaybackRate() clips values over twice the output sample rate
+ @LargeTest
+ public void testSetPlaybackRateClip() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackRateClip";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize/2];
+ int outputSR = AudioTrack.getNativeOutputSampleRate(TEST_STREAM_TYPE);
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.play();
+ track.setPlaybackRate(3*outputSR);
+ assertTrue(TEST_NAME, track.getSampleRate() == 2*outputSR);
+ //-------- tear down --------------
+ track.release();
+ }
+*/
+ //Test case 8: setPlaybackRate() invalid operation if track not initialized
+ @LargeTest
+ public void testSetPlaybackRateUninit() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackRateUninit";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STATIC;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_NO_STATIC_DATA);
+ assertTrue(TEST_NAME, track.setPlaybackRate(TEST_SR/2) == AudioTrack.ERROR_INVALID_OPERATION);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //-----------------------------------------------------------------
+ // Playback progress
+ //----------------------------------
+
+ //Test case 1: setPlaybackHeadPosition() on playing track
+ @LargeTest
+ public void testSetPlaybackHeadPositionPlaying() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackHeadPositionPlaying";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ 2*minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ assertTrue(TEST_NAME,
+ track.setPlaybackHeadPosition(10) == AudioTrack.ERROR_INVALID_OPERATION);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 2: setPlaybackHeadPosition() on stopped track
+ @LargeTest
+ public void testSetPlaybackHeadPositionStopped() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackHeadPositionStopped";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ 2*minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ track.stop();
+ assumeTrue(TEST_NAME, track.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
+ assertTrue(TEST_NAME, track.setPlaybackHeadPosition(10) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 3: setPlaybackHeadPosition() on paused track
+ @LargeTest
+ public void testSetPlaybackHeadPositionPaused() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackHeadPositionPaused";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ 2*minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ track.pause();
+ assumeTrue(TEST_NAME, track.getPlayState() == AudioTrack.PLAYSTATE_PAUSED);
+ assertTrue(TEST_NAME, track.setPlaybackHeadPosition(10) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 4: setPlaybackHeadPosition() beyond what has been written
+ @LargeTest
+ public void testSetPlaybackHeadPositionTooFar() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetPlaybackHeadPositionTooFar";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ 2*minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ // make up a frame index that's beyond what has been written: go from buffer size to frame
+ // count (given the audio track properties), and add 77.
+ int frameIndexTooFar = (2*minBuffSize/2) + 77;
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ track.write(data, 0, data.length);
+ track.write(data, 0, data.length);
+ track.play();
+ track.stop();
+ assumeTrue(TEST_NAME, track.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
+ assertTrue(TEST_NAME, track.setPlaybackHeadPosition(frameIndexTooFar) == AudioTrack.ERROR_BAD_VALUE);
+ //-------- tear down --------------
+ track.release();
+ }
+
+
+ //Test case 5: setLoopPoints() fails for MODE_STREAM
+ @LargeTest
+ public void testSetLoopPointsStream() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetLoopPointsStream";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STREAM;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ 2*minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME, track.setLoopPoints(2, 50, 2) == AudioTrack.ERROR_INVALID_OPERATION);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 6: setLoopPoints() fails start > end
+ @LargeTest
+ public void testSetLoopPointsStartAfterEnd() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetLoopPointsStartAfterEnd";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STATIC;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME, track.setLoopPoints(50, 0, 2) == AudioTrack.ERROR_BAD_VALUE);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 6: setLoopPoints() success
+ @LargeTest
+ public void testSetLoopPointsSuccess() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetLoopPointsSuccess";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STATIC;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ //-------- test --------------
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME, track.setLoopPoints(0, 50, 2) == AudioTrack.SUCCESS);
+ //-------- tear down --------------
+ track.release();
+ }
+
+ //Test case 7: setLoopPoints() fails with loop length bigger than content
+ @LargeTest
+ public void testSetLoopPointsLoopTooLong() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetLoopPointsLoopTooLong";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STATIC;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ int dataSizeInFrames = minBuffSize/2;
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_NO_STATIC_DATA);
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME,
+ track.setLoopPoints(10, dataSizeInFrames+20, 2) == AudioTrack.ERROR_BAD_VALUE);
+ //-------- tear down --------------
+ track.release();
+ }
+/*
+ //Test case 7: setLoopPoints() fails with start beyond what can be written for the track
+ @LargeTest
+ public void testSetLoopPointsStartTooFar() throws Exception {
+ // constants for test
+ final String TEST_NAME = "testSetLoopPointsStartTooFar";
+ final int TEST_SR = 22050;
+ final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ final int TEST_MODE = AudioTrack.MODE_STATIC;
+ final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+
+ //-------- initialization --------------
+ int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
+ AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
+ minBuffSize, TEST_MODE);
+ byte data[] = new byte[minBuffSize];
+ int dataSizeInFrames = minBuffSize/2;//16bit data
+ //-------- test --------------
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_NO_STATIC_DATA);
+ track.write(data, 0, data.length);
+ assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
+ assertTrue(TEST_NAME,
+ track.setLoopPoints(dataSizeInFrames+20, dataSizeInFrames+50, 2)
+ == AudioTrack.ERROR_BAD_VALUE);
+ //-------- tear down --------------
+ track.release();
+ }
+*/
+
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
index b606f25..2f0173d 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java
@@ -1,17 +1,17 @@
/*
* Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
*/
package com.android.mediaframeworktest.performance;
@@ -19,141 +19,358 @@ package com.android.mediaframeworktest.performance;
import com.android.mediaframeworktest.MediaFrameworkTest;
import com.android.mediaframeworktest.MediaNames;
-import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.media.MediaPlayer;
+import android.media.MediaRecorder;
import android.os.SystemClock;
import android.test.ActivityInstrumentationTestCase;
import android.test.suitebuilder.annotation.LargeTest;
import android.test.suitebuilder.annotation.Suppress;
import android.util.Log;
+import android.view.SurfaceHolder;
import java.io.FileDescriptor;
import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Writer;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.BufferedWriter;
+
import android.media.MediaMetadataRetriever;
/**
- * Junit / Instrumentation test case for the media player api
-
- */
-public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<MediaFrameworkTest> {
-
-
- private boolean mIsPlaying = true;
- private String TAG = "MediaPlayerApiTest";
- Context mContext;
- private SQLiteDatabase mDB;
-
-
- public MediaPlayerPerformance() {
- super("com.android.mediaframeworktest", MediaFrameworkTest.class);
- }
+ * Junit / Instrumentation - performance measurement for media player and
+ * recorder
+ */
+public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<MediaFrameworkTest> {
+
+ private String TAG = "MediaFrameworkPerformance";
+
+ private SQLiteDatabase mDB;
+ private SurfaceHolder mSurfaceHolder = null;
+ private static final int NUM_STRESS_LOOP = 10;
+ private static final int NUM_PLAYBACk_IN_EACH_LOOP = 20;
+ private static final long MEDIA_STRESS_WAIT_TIME = 5000; //5 seconds
+ private static final String H263_VIDEO_PLAYBACK_MEMOUT =
+ "/sdcard/h263VideoPlaybackMemOut.txt";
+ private static final String H264_VIDEO_PLAYBACK_MEMOUT =
+ "/sdcard/h264VideoPlaybackMemOut.txt";
+ private static final String WMV_VIDEO_PLAYBACK_MEMOUT =
+ "/sdcard/WmvVideoPlaybackMemOut.txt";
+ private static final String H263_VIDEO_ONLY_RECORD_MEMOUT =
+ "/sdcard/recordH263VideoOnlyMemOut.txt";
+ private static final String MP4_VIDEO_ONLY_RECORD_MEMOUT =
+ "/sdcard/recordMPEG4VideoOnlyMemOut.txt";
+ private static final String H263_VIDEO_AUDIO_RECORD_MEMOUT =
+ "/sdcard/recordVideoH263AudioMemOut.txt";
+ private static final String AUDIO_ONLY_RECORD_MEMOUT =
+ "/sdcard/recordAudioOnlyMemOut.txt";
+
+
+ public MediaPlayerPerformance() {
+ super("com.android.mediaframeworktest", MediaFrameworkTest.class);
+ }
protected void setUp() throws Exception {
-
- super.setUp();
- }
-
- public void createDB(){
- mDB = SQLiteDatabase.openOrCreateDatabase("/sdcard/perf.db",null);
- mDB.execSQL("CREATE TABLE perfdata (_id INTEGER PRIMARY KEY,"
- + "file TEXT," + "setdatatime LONG," +"preparetime LONG," +"playtime LONG" + ");");
- }
-
- public void audioPlaybackStartupTime(String[] testFile){
- long t1 = 0;
- long t2 = 0;
- long t3 = 0;
- long t4 =0;
-
- long setDataSourceDuration = 0;
- long prepareDuration = 0;
- long startDuration=0;
-
- long totalSetDataTime=0;
- long totalPrepareTime=0;
- long totalStartDuration=0;
-
- int numberOfFiles = testFile.length;
- Log.v(TAG, "File lenght " + numberOfFiles);
- for (int k=0; k<numberOfFiles; k++){
- MediaPlayer mp = new MediaPlayer();
- try{
- t1 = SystemClock.uptimeMillis();
- FileInputStream fis = new FileInputStream(testFile[k]);
- FileDescriptor fd = fis.getFD();
- mp.setDataSource(fd);
- fis.close();
- t2 = SystemClock.uptimeMillis();
- mp.prepare();
- t3 = SystemClock.uptimeMillis();
- mp.start();
- t4 = SystemClock.uptimeMillis();
- Thread.sleep(10000);
- mp.pause();
- }catch (Exception e){}
- setDataSourceDuration = t2 -t1;
- prepareDuration = t3 - t2;
- startDuration = t4 - t3;
- totalSetDataTime = totalSetDataTime + setDataSourceDuration;
- totalPrepareTime = totalPrepareTime + prepareDuration;
- totalStartDuration = totalStartDuration + startDuration;
- mDB.execSQL("INSERT INTO perfdata (file, setdatatime, preparetime, playtime) VALUES (" + '"' + testFile[k] + '"' +','
- +setDataSourceDuration+ ',' + prepareDuration + ',' + startDuration +");");
- Log.v(TAG,"File name " + testFile[k]);
- mp.stop();
- mp.release();
- }
- Log.v (TAG, "setDataSource average " + totalSetDataTime/numberOfFiles);
- Log.v (TAG, "prepare average " + totalPrepareTime/numberOfFiles);
- Log.v (TAG, "start average " + totalStartDuration/numberOfFiles);
-
- }
-
- //Test cases for GetCurrentPosition
- @LargeTest
+ super.setUp();
+ }
+
+ public void createDB() {
+ mDB = SQLiteDatabase.openOrCreateDatabase("/sdcard/perf.db", null);
+ mDB.execSQL("CREATE TABLE perfdata (_id INTEGER PRIMARY KEY," +
+ "file TEXT," + "setdatatime LONG," + "preparetime LONG," +
+ "playtime LONG" + ");");
+ }
+
+ public void audioPlaybackStartupTime(String[] testFile) {
+ long t1 = 0;
+ long t2 = 0;
+ long t3 = 0;
+ long t4 = 0;
+ long setDataSourceDuration = 0;
+ long prepareDuration = 0;
+ long startDuration = 0;
+ long totalSetDataTime = 0;
+ long totalPrepareTime = 0;
+ long totalStartDuration = 0;
+
+ int numberOfFiles = testFile.length;
+ Log.v(TAG, "File length " + numberOfFiles);
+ for (int k = 0; k < numberOfFiles; k++) {
+ MediaPlayer mp = new MediaPlayer();
+ try {
+ t1 = SystemClock.uptimeMillis();
+ FileInputStream fis = new FileInputStream(testFile[k]);
+ FileDescriptor fd = fis.getFD();
+ mp.setDataSource(fd);
+ fis.close();
+ t2 = SystemClock.uptimeMillis();
+ mp.prepare();
+ t3 = SystemClock.uptimeMillis();
+ mp.start();
+ t4 = SystemClock.uptimeMillis();
+ } catch (Exception e) {
+ Log.v(TAG, e.toString());
+ }
+ setDataSourceDuration = t2 - t1;
+ prepareDuration = t3 - t2;
+ startDuration = t4 - t3;
+ totalSetDataTime = totalSetDataTime + setDataSourceDuration;
+ totalPrepareTime = totalPrepareTime + prepareDuration;
+ totalStartDuration = totalStartDuration + startDuration;
+ mDB.execSQL("INSERT INTO perfdata (file, setdatatime, preparetime," +
+ " playtime) VALUES (" + '"' + testFile[k] + '"' + ',' +
+ setDataSourceDuration + ',' + prepareDuration +
+ ',' + startDuration + ");");
+ Log.v(TAG, "File name " + testFile[k]);
+ mp.stop();
+ mp.release();
+ }
+ Log.v(TAG, "setDataSource average " + totalSetDataTime / numberOfFiles);
+ Log.v(TAG, "prepare average " + totalPrepareTime / numberOfFiles);
+ Log.v(TAG, "start average " + totalStartDuration / numberOfFiles);
+
+ }
+
+ @Suppress
public void testStartUpTime() throws Exception {
- createDB();
- audioPlaybackStartupTime(MediaNames.MP3FILES);
- audioPlaybackStartupTime(MediaNames.AACFILES);
-
- }
-
- public void wmametadatautility(String[] testFile){
- long t1 = 0;
- long t2 = 0;
- long sum = 0;
- long duration = 0;
- MediaMetadataRetriever retriever = new MediaMetadataRetriever();
- String value;
- for(int i = 0, n = testFile.length; i < n; ++i) {
- try {
- t1 = SystemClock.uptimeMillis();
- retriever.setDataSource(testFile[i]);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ALBUM);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ARTIST);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COMPOSER);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_GENRE);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_YEAR);
- value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CD_TRACK_NUMBER);
- t2 = SystemClock.uptimeMillis();
- duration = t2 - t1;
- Log.v(TAG, "Time taken = " + duration);
- sum=sum+duration;
- }
- catch (Exception e){Log.v(TAG, e.getMessage());}
-
- }
- Log.v(TAG, "Average duration = " + sum/testFile.length);
- }
-
+ createDB();
+ audioPlaybackStartupTime(MediaNames.MP3FILES);
+ audioPlaybackStartupTime(MediaNames.AACFILES);
+
+ }
+
+ public void wmametadatautility(String[] testFile) {
+ long t1 = 0;
+ long t2 = 0;
+ long sum = 0;
+ long duration = 0;
+ MediaMetadataRetriever retriever = new MediaMetadataRetriever();
+ String value;
+ for (int i = 0, n = testFile.length; i < n; ++i) {
+ try {
+ t1 = SystemClock.uptimeMillis();
+ retriever.setDataSource(testFile[i]);
+ value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ALBUM);
+ value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ARTIST);
+ value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COMPOSER);
+ value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_GENRE);
+ value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE);
+ value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_YEAR);
+ value =
+ retriever
+ .extractMetadata(MediaMetadataRetriever.METADATA_KEY_CD_TRACK_NUMBER);
+ t2 = SystemClock.uptimeMillis();
+ duration = t2 - t1;
+ Log.v(TAG, "Time taken = " + duration);
+ sum = sum + duration;
+ } catch (Exception e) {
+ Log.v(TAG, e.getMessage());
+ }
+
+ }
+ Log.v(TAG, "Average duration = " + sum / testFile.length);
+ }
+
+
+    // Note: this test assumes the mediaserver's pid is 34
+ public void mediaStressPlayback(String testFilePath) {
+ for (int i = 0; i < NUM_PLAYBACk_IN_EACH_LOOP; i++) {
+ MediaPlayer mp = new MediaPlayer();
+ try {
+ mp.setDataSource(testFilePath);
+ mp.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder());
+ mp.prepare();
+ mp.start();
+ Thread.sleep(MEDIA_STRESS_WAIT_TIME);
+ mp.release();
+ } catch (Exception e) {
+ mp.release();
+ Log.v(TAG, e.toString());
+ }
+ }
+ }
+
+    // Note: this test assumes the mediaserver's pid is 34
+ private void stressVideoRecord(int frameRate, int width, int height, int videoFormat,
+ int outFormat, String outFile, boolean videoOnly) {
+ // Video recording
+ for (int i = 0; i < NUM_PLAYBACk_IN_EACH_LOOP; i++) {
+ MediaRecorder mRecorder = new MediaRecorder();
+ try {
+ if (!videoOnly) {
+ Log.v(TAG, "setAudioSource");
+ mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+ }
+ mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+ mRecorder.setOutputFormat(outFormat);
+ Log.v(TAG, "output format " + outFormat);
+ mRecorder.setOutputFile(outFile);
+ mRecorder.setVideoFrameRate(frameRate);
+ mRecorder.setVideoSize(width, height);
+ Log.v(TAG, "setEncoder");
+ mRecorder.setVideoEncoder(videoFormat);
+ if (!videoOnly) {
+ mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
+ }
+ mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
+ mRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
+ mRecorder.prepare();
+ mRecorder.start();
+ Thread.sleep(MEDIA_STRESS_WAIT_TIME);
+ mRecorder.stop();
+ mRecorder.release();
+ } catch (Exception e) {
+ Log.v("record video failed ", e.toString());
+ mRecorder.release();
+ }
+ }
+ }
+
+ public void stressAudioRecord(String filePath) {
+        // This test is intended for short media files only
+ for (int i = 0; i < NUM_PLAYBACk_IN_EACH_LOOP; i++) {
+ MediaRecorder mRecorder = new MediaRecorder();
+ try {
+ mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+ mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
+ mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
+ mRecorder.setOutputFile(filePath);
+ mRecorder.prepare();
+ mRecorder.start();
+ Thread.sleep(MEDIA_STRESS_WAIT_TIME);
+ mRecorder.stop();
+ mRecorder.release();
+ } catch (Exception e) {
+ Log.v(TAG, e.toString());
+ mRecorder.release();
+ }
+ }
+ }
+
+    // Write the "ps mediaserver" output to the log file
+ public void getMemoryWriteToLog(Writer output) {
+ String cm = "ps mediaserver";
+ String memoryUsage = null;
+ int ch;
+ try {
+ Process p = Runtime.getRuntime().exec(cm);
+ InputStream in = p.getInputStream();
+ StringBuffer sb = new StringBuffer(512);
+ while ((ch = in.read()) != -1) {
+ sb.append((char) ch);
+ }
+ memoryUsage = sb.toString();
+ } catch (IOException e) {
+ Log.v(TAG, e.toString());
+ }
+
+ String[] poList = memoryUsage.split("\r|\n|\r\n");
+ String memusage = poList[1].concat("\n");
+ Log.v(TAG, memusage);
+ try {
+            // Write the memory usage line to the output file
+ output.write(memusage);
+ } catch (Exception e) {
+ e.toString();
+ }
+ }
+
+
@Suppress
public void testWmaParseTime() throws Exception {
- // createDB();
- wmametadatautility(MediaNames.WMASUPPORTED);
+ // createDB();
+ wmametadatautility(MediaNames.WMASUPPORTED);
+ }
+
+
+    // Test case 1: Capture the memory usage after every 20 H.263 playbacks
+ @LargeTest
+ public void testH263VideoPlaybackMemoryUsage() throws Exception {
+ File h263MemoryOut = new File(H263_VIDEO_PLAYBACK_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(h263MemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ mediaStressPlayback(MediaNames.VIDEO_HIGHRES_H263);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
+ }
+
+    // Test case 2: Capture the memory usage after every 20 H.264 playbacks
+ @LargeTest
+ public void testH264VideoPlaybackMemoryUsage() throws Exception {
+ File h264MemoryOut = new File(H264_VIDEO_PLAYBACK_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(h264MemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ mediaStressPlayback(MediaNames.VIDEO_H264_AMR);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
+ }
+
+    // Test case 3: Capture the memory usage after every 20 WMV playbacks
+ @LargeTest
+ public void testWMVVideoPlaybackMemoryUsage() throws Exception {
+ File wmvMemoryOut = new File(WMV_VIDEO_PLAYBACK_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(wmvMemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ mediaStressPlayback(MediaNames.VIDEO_WMV);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
+ }
+
+    // Test case 4: Capture the memory usage after every 20 video-only (H.263) recordings
+ @LargeTest
+ public void testH263RecordVideoOnlyMemoryUsage() throws Exception {
+ File videoH263RecordOnlyMemoryOut = new File(H263_VIDEO_ONLY_RECORD_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(videoH263RecordOnlyMemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ stressVideoRecord(20, 352, 288, MediaRecorder.VideoEncoder.H263,
+ MediaRecorder.OutputFormat.MPEG_4, MediaNames.RECORDED_VIDEO_3GP, true);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
+ }
+
+    // Test case 5: Capture the memory usage after every 20 video-only (MPEG-4) recordings
+ @LargeTest
+ public void testMpeg4RecordVideoOnlyMemoryUsage() throws Exception {
+ File videoMp4RecordOnlyMemoryOut = new File(MP4_VIDEO_ONLY_RECORD_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(videoMp4RecordOnlyMemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ stressVideoRecord(20, 352, 288, MediaRecorder.VideoEncoder.MPEG_4_SP,
+ MediaRecorder.OutputFormat.MPEG_4, MediaNames.RECORDED_VIDEO_3GP, true);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
+ }
+
+    // Test case 6: Capture the memory usage after every 20 video-and-audio recordings
+ @LargeTest
+ public void testRecordVidedAudioMemoryUsage() throws Exception {
+ File videoRecordAudioMemoryOut = new File(H263_VIDEO_AUDIO_RECORD_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(videoRecordAudioMemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ stressVideoRecord(20, 352, 288, MediaRecorder.VideoEncoder.H263,
+ MediaRecorder.OutputFormat.MPEG_4, MediaNames.RECORDED_VIDEO_3GP, false);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
}
-
-
-}
+    // Test case 7: Capture the memory usage after every 20 audio-only recordings
+ @LargeTest
+ public void testRecordAudioOnlyMemoryUsage() throws Exception {
+ File audioOnlyMemoryOut = new File(AUDIO_ONLY_RECORD_MEMOUT);
+ Writer output = new BufferedWriter(new FileWriter(audioOnlyMemoryOut));
+ for (int i = 0; i < NUM_STRESS_LOOP; i++) {
+ stressAudioRecord(MediaNames.RECORDER_OUTPUT);
+ getMemoryWriteToLog(output);
+ }
+ output.close();
+ }
+}