Diffstat (limited to 'media')
-rw-r--r--  media/java/android/media/AudioManager.java  144
-rw-r--r--  media/java/android/media/AudioService.java  20
-rw-r--r--  media/java/android/media/IRemoteControlClient.aidl  60
-rw-r--r--  media/java/android/media/audiofx/AudioEffect.java  15
-rw-r--r--  media/java/android/media/audiofx/BassBoost.java  13
-rw-r--r--  media/java/android/media/audiofx/Equalizer.java  15
-rw-r--r--  media/java/android/media/audiofx/Virtualizer.java  16
-rw-r--r--  media/java/android/media/videoeditor/MediaArtistNativeHelper.java  93
-rwxr-xr-x  media/java/android/media/videoeditor/MediaImageItem.java  26
-rwxr-xr-x  media/java/android/media/videoeditor/MediaItem.java  38
-rwxr-xr-x  media/java/android/media/videoeditor/MediaVideoItem.java  18
-rwxr-xr-x  media/jni/mediaeditor/VideoBrowserInternal.h  3
-rwxr-xr-x  media/jni/mediaeditor/VideoBrowserMain.c  19
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorMain.cpp  86
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp  18
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp  33
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h  2
-rw-r--r--  media/libstagefright/ACodec.cpp  74
-rw-r--r--  media/libstagefright/Android.mk  6
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp  2
-rwxr-xr-x  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java  28
21 files changed, 448 insertions, 281 deletions
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 56a9933..7a92b35 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -1743,7 +1743,13 @@ public class AudioManager {
/**
* @hide
- * @param eventReceiver
+ * Unregisters the remote control client that was providing information to display on the
+ * remote controls.
+ * @param eventReceiver identifier of the {@link android.content.BroadcastReceiver}
+ * that receives the media button intent and is associated with the remote control
+ * client.
+ * @see #registerRemoteControlClient(ComponentName, IRemoteControlClient)
+
*/
public void unregisterRemoteControlClient(ComponentName eventReceiver) {
if (eventReceiver == null) {
@@ -1783,27 +1789,152 @@ public class AudioManager {
* Definitions of constants to be used in {@link android.media.IRemoteControlClient}.
*/
public final class RemoteControlParameters {
+ /**
+ * Playback state of an IRemoteControlClient which is stopped.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_STOPPED = 1;
+ /**
+ * Playback state of an IRemoteControlClient which is paused.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_PAUSED = 2;
+ /**
+ * Playback state of an IRemoteControlClient which is playing media.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_PLAYING = 3;
+ /**
+ * Playback state of an IRemoteControlClient which is fast forwarding in the media
+ * it is currently playing.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_FAST_FORWARDING = 4;
+ /**
+ * Playback state of an IRemoteControlClient which is fast rewinding in the media
+ * it is currently playing.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_REWINDING = 5;
+ /**
+ * Playback state of an IRemoteControlClient which is skipping to the next
+ * logical chapter (such as a song in a playlist) in the media it is currently playing.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_SKIPPING_FORWARDS = 6;
+ /**
+ * Playback state of an IRemoteControlClient which is skipping back to the previous
+ * logical chapter (such as a song in a playlist) in the media it is currently playing.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_SKIPPING_BACKWARDS = 7;
+ /**
+ * Playback state of an IRemoteControlClient which is buffering data to play before it can
+ * start or resume playback.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
public final static int PLAYSTATE_BUFFERING = 8;
+ /**
+ * Playback state of an IRemoteControlClient which cannot perform any playback related
+ * operation because of an internal error. Examples of such situations are no network
+ * connectivity when attempting to stream data from a server, or expired user credentials
+ * when trying to play subscription-based content.
+ *
+ * @see android.media.IRemoteControlClient#getPlaybackState()
+ */
+ public final static int PLAYSTATE_ERROR = 9;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "previous" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PREVIOUS
+ */
public final static int FLAG_KEY_MEDIA_PREVIOUS = 1 << 0;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "rewind" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_REWIND
+ */
public final static int FLAG_KEY_MEDIA_REWIND = 1 << 1;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "play" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PLAY
+ */
public final static int FLAG_KEY_MEDIA_PLAY = 1 << 2;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "play/pause" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PLAY_PAUSE
+ */
public final static int FLAG_KEY_MEDIA_PLAY_PAUSE = 1 << 3;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "pause" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PAUSE
+ */
public final static int FLAG_KEY_MEDIA_PAUSE = 1 << 4;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "stop" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_STOP
+ */
public final static int FLAG_KEY_MEDIA_STOP = 1 << 5;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "fast forward" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_FAST_FORWARD
+ */
public final static int FLAG_KEY_MEDIA_FAST_FORWARD = 1 << 6;
+ /**
+ * Flag indicating an IRemoteControlClient makes use of the "next" media key.
+ *
+ * @see android.media.IRemoteControlClient#getTransportControlFlags()
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_NEXT
+ */
public final static int FLAG_KEY_MEDIA_NEXT = 1 << 7;
+ /**
+ * Flag used to signal that the metadata exposed by the IRemoteControlClient has changed.
+ *
+ * @see #notifyRemoteControlInformationChanged(ComponentName, int)
+ */
public final static int FLAG_INFORMATION_CHANGED_METADATA = 1 << 0;
+ /**
+ * Flag used to signal that the transport control buttons supported by the
+ * IRemoteControlClient have changed.
+ * This can for instance happen when playback is at the end of a playlist, and the "next"
+ * operation is not supported anymore.
+ *
+ * @see #notifyRemoteControlInformationChanged(ComponentName, int)
+ */
public final static int FLAG_INFORMATION_CHANGED_KEY_MEDIA = 1 << 1;
+ /**
+ * Flag used to signal that the playback state of the IRemoteControlClient has changed.
+ *
+ * @see #notifyRemoteControlInformationChanged(ComponentName, int)
+ */
public final static int FLAG_INFORMATION_CHANGED_PLAYSTATE = 1 << 2;
+ /**
+ * Flag used to signal that the album art for the IRemoteControlClient has changed.
+ *
+ * @see #notifyRemoteControlInformationChanged(ComponentName, int)
+ */
public final static int FLAG_INFORMATION_CHANGED_ALBUM_ART = 1 << 3;
}
@@ -1830,6 +1961,17 @@ public class AudioManager {
/**
* @hide
+ * The media button event receiver associated with the IRemoteControlClient.
+ * The {@link android.content.ComponentName} value of the event receiver can be retrieved with
+ * {@link android.content.ComponentName#unflattenFromString(String)}
+ *
+ * @see #REMOTE_CONTROL_CLIENT_CHANGED_ACTION
+ */
+ public static final String EXTRA_REMOTE_CONTROL_EVENT_RECEIVER =
+ "android.media.EXTRA_REMOTE_CONTROL_EVENT_RECEIVER";
+
+ /**
+ * @hide
* The flags describing what information has changed in the current remote control client.
*
* @see #REMOTE_CONTROL_CLIENT_CHANGED_ACTION
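For orientation, a minimal sketch of how a remote control display might consume the broadcast carrying these extras. Only the extra names and flag constants come from this change (all of them @hide); the receiver class and its registration are assumptions for illustration.

    import android.content.BroadcastReceiver;
    import android.content.ComponentName;
    import android.content.Context;
    import android.content.Intent;
    import android.media.AudioManager;

    // Hypothetical display-side receiver for the remote control client changed broadcast.
    public class RemoteControlDisplayReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            // Media button event receiver associated with the current remote control client.
            String flattened =
                    intent.getStringExtra(AudioManager.EXTRA_REMOTE_CONTROL_EVENT_RECEIVER);
            ComponentName eventReceiver =
                    (flattened == null) ? null : ComponentName.unflattenFromString(flattened);

            // Bit mask describing what changed for that client.
            int changed = intent.getIntExtra(
                    AudioManager.EXTRA_REMOTE_CONTROL_CLIENT_INFO_CHANGED, 0);
            if ((changed
                    & AudioManager.RemoteControlParameters.FLAG_INFORMATION_CHANGED_METADATA) != 0) {
                // refresh the title/artist/album text shown for eventReceiver's client
            }
            if ((changed
                    & AudioManager.RemoteControlParameters.FLAG_INFORMATION_CHANGED_PLAYSTATE) != 0) {
                // refresh the play/pause indicator
            }
        }
    }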
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 5951229..ff2e66b 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -17,6 +17,7 @@
package android.media;
import android.app.ActivityManagerNative;
+import android.app.KeyguardManager;
import android.bluetooth.BluetoothA2dp;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothClass;
@@ -309,6 +310,8 @@ public class AudioService extends IAudioService.Stub {
private static final int NOTIFICATION_VOLUME_DELAY_MS = 5000;
// previous volume adjustment direction received by checkForRingerModeChange()
private int mPrevVolDirection = AudioManager.ADJUST_SAME;
+ // Keyguard manager proxy
+ private KeyguardManager mKeyguardManager;
///////////////////////////////////////////////////////////////////////////
// Construction
@@ -492,8 +495,10 @@ public class AudioService extends IAudioService.Stub {
streamType = getActiveStreamType(suggestedStreamType);
}
- // Don't play sound on other streams
- if (streamType != AudioSystem.STREAM_RING && (flags & AudioManager.FLAG_PLAY_SOUND) != 0) {
+ // Play sounds on STREAM_RING only, and only when the lock screen is not showing.
+ if ((flags & AudioManager.FLAG_PLAY_SOUND) != 0 &&
+ ((STREAM_VOLUME_ALIAS[streamType] != AudioSystem.STREAM_RING) ||
+ (mKeyguardManager != null && mKeyguardManager.isKeyguardLocked()))) {
flags &= ~AudioManager.FLAG_PLAY_SOUND;
}
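Restating the new gating condition positively may help: the volume-adjust sound is played only when it was requested, the adjusted stream aliases to STREAM_RING, and the keyguard is not locked. A sketch of the equivalent logic (the class and parameter names are illustrative):

    import android.media.AudioManager;

    final class VolumeSoundPolicy {
        // Equivalent, positively-stated form of the condition above.
        static boolean shouldPlayVolumeSound(
                int flags, boolean streamAliasesToRing, boolean keyguardLocked) {
            return (flags & AudioManager.FLAG_PLAY_SOUND) != 0
                    && streamAliasesToRing
                    && !keyguardLocked;
        }
    }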
@@ -2167,8 +2172,10 @@ public class AudioService extends IAudioService.Stub {
case MSG_RCDISPLAY_UPDATE:
synchronized(mCurrentRcLock) {
+ // msg.obj is guaranteed to be non-null
+ RemoteControlStackEntry rcse = (RemoteControlStackEntry)msg.obj;
if ((mCurrentRcClient == null) ||
- (!mCurrentRcClient.equals((IRemoteControlClient)msg.obj))) {
+ (!mCurrentRcClient.equals(rcse.mRcClient))) {
// the remote control display owner has changed between the
// the message to update the display was sent, and the time it
// gets to be processed (now)
@@ -2183,6 +2190,9 @@ public class AudioService extends IAudioService.Stub {
rcClientIntent.putExtra(
AudioManager.EXTRA_REMOTE_CONTROL_CLIENT_INFO_CHANGED,
msg.arg1);
+ rcClientIntent.putExtra(
+ AudioManager.EXTRA_REMOTE_CONTROL_EVENT_RECEIVER,
+ rcse.mReceiverComponent.flattenToString());
rcClientIntent.setFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY);
mContext.sendBroadcast(rcClientIntent);
}
@@ -2508,6 +2518,8 @@ public class AudioService extends IAudioService.Stub {
sendMsg(mAudioHandler, MSG_LOAD_SOUND_EFFECTS, SHARED_MSG, SENDMSG_NOOP,
0, 0, null, 0);
+ mKeyguardManager =
+ (KeyguardManager)mContext.getSystemService(Context.KEYGUARD_SERVICE);
mScoConnectionState = AudioManager.SCO_AUDIO_STATE_ERROR;
resetBluetoothSco();
getBluetoothHeadset();
@@ -3131,7 +3143,7 @@ public class AudioService extends IAudioService.Stub {
mCurrentRcClient = rcse.mRcClient;
}
mAudioHandler.sendMessage( mAudioHandler.obtainMessage(MSG_RCDISPLAY_UPDATE,
- infoFlagsAboutToBeUsed /* arg1 */, 0, rcse.mRcClient /* obj */) );
+ infoFlagsAboutToBeUsed /* arg1 */, 0, rcse /* obj, != null */) );
}
/**
diff --git a/media/java/android/media/IRemoteControlClient.aidl b/media/java/android/media/IRemoteControlClient.aidl
index a49371c..76d178c 100644
--- a/media/java/android/media/IRemoteControlClient.aidl
+++ b/media/java/android/media/IRemoteControlClient.aidl
@@ -19,7 +19,12 @@ package android.media;
import android.graphics.Bitmap;
/**
- * {@hide}
+ * @hide
+ * Interface for an object that exposes information meant to be consumed by remote controls
+ * capable of displaying metadata, album art and media transport control buttons.
+ * Such a remote control client object is associated with a media button event receiver
+ * when registered through
+ * {@link AudioManager#registerRemoteControlClient(ComponentName, IRemoteControlClient)}.
*/
interface IRemoteControlClient
{
@@ -41,36 +46,49 @@ interface IRemoteControlClient
* {@link android.media.MediaMetadataRetriever#METADATA_KEY_TITLE},
* {@link android.media.MediaMetadataRetriever#METADATA_KEY_WRITER},
* {@link android.media.MediaMetadataRetriever#METADATA_KEY_YEAR}.
- * @return null if the given field is not supported, or the String matching the metadata field.
+ * @return null if the requested field is not supported, or the String matching the
+ * metadata field.
*/
String getMetadataString(int field);
/**
- * Returns the current playback state.
+ * Called by a remote control to retrieve the current playback state.
* @return one of the following values:
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_STOPPED},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_PAUSED},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_PLAYING},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_FAST_FORWARDING},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_REWINDING},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_SKIPPING_FORWARDS},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_SKIPPING_BACKWARDS},
- * {@link android.media.AudioManager.RemoteControl#PLAYSTATE_BUFFERING}.
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_STOPPED},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_PAUSED},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_PLAYING},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_FAST_FORWARDING},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_REWINDING},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_SKIPPING_FORWARDS},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_SKIPPING_BACKWARDS},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_BUFFERING},
+ * {@link android.media.AudioManager.RemoteControlParameters#PLAYSTATE_ERROR}.
*/
int getPlaybackState();
/**
- * Returns the flags for the media transport control buttons this client supports.
- * @see {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_PREVIOUS},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_REWIND},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_PLAY},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_PLAY_PAUSE},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_PAUSE},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_STOP},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_FAST_FORWARD},
- * {@link android.media.AudioManager.RemoteControl#FLAG_KEY_MEDIA_NEXT}
+ * Called by a remote control to retrieve the flags for the media transport control buttons
+ * that this client supports.
+ * @see {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_PREVIOUS},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_REWIND},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_PLAY},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_PLAY_PAUSE},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_PAUSE},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_STOP},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_FAST_FORWARD},
+ * {@link android.media.AudioManager.RemoteControlParameters#FLAG_KEY_MEDIA_NEXT}
*/
int getTransportControlFlags();
- Bitmap getAlbumArt(int width, int height);
+ /**
+ * Called by a remote control to retrieve the album art picture at the requested size.
+ * Note that returning a bitmap smaller than the maximum requested dimensions is accepted
+ * and will be scaled as needed, but exceeding the maximum dimensions may produce
+ * unspecified results, such as the image being cropped or simply not being displayed.
+ * @param maxWidth the maximum width of the requested bitmap expressed in pixels.
+ * @param maxHeight the maximum height of the requested bitmap expressed in pixels.
+ * @return the bitmap for the album art, or null if there isn't any.
+ * @see android.graphics.Bitmap
+ */
+ Bitmap getAlbumArt(int maxWidth, int maxHeight);
}
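Two hedged display-side helpers showing how the documented calls would typically be used; the wrapper class is an assumption, the interface methods and flag constants are the ones documented above.

    import android.graphics.Bitmap;
    import android.media.AudioManager;
    import android.media.IRemoteControlClient;
    import android.os.RemoteException;

    final class RemoteControlQueries {
        // Only show a "next" button if the client declared support for it.
        static boolean supportsNext(IRemoteControlClient client) throws RemoteException {
            return (client.getTransportControlFlags()
                    & AudioManager.RemoteControlParameters.FLAG_KEY_MEDIA_NEXT) != 0;
        }

        // Ask for art no larger than the view that will show it: a smaller bitmap is fine
        // (the display scales it up), a larger one may be cropped or not shown at all.
        static Bitmap fetchAlbumArt(IRemoteControlClient client, int maxWidth, int maxHeight)
                throws RemoteException {
            return client.getAlbumArt(maxWidth, maxHeight);
        }
    }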
diff --git a/media/java/android/media/audiofx/AudioEffect.java b/media/java/android/media/audiofx/AudioEffect.java
index 3ac0104..673f9f4 100644
--- a/media/java/android/media/audiofx/AudioEffect.java
+++ b/media/java/android/media/audiofx/AudioEffect.java
@@ -40,13 +40,11 @@ import java.util.UUID;
* <li> {@link android.media.audiofx.PresetReverb}</li>
* <li> {@link android.media.audiofx.EnvironmentalReverb}</li>
* </ul>
- * <p>If the audio effect is to be applied to a specific AudioTrack or MediaPlayer instance,
+ * <p>To apply the audio effect to a specific AudioTrack or MediaPlayer instance,
* the application must specify the audio session ID of that instance when creating the AudioEffect.
* (see {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions).
- * To apply an effect to the global audio output mix, session 0 must be specified when creating the
- * AudioEffect.
- * <p>Creating an effect on the output mix (audio session 0) requires permission
- * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * <p>NOTE: attaching insert effects (equalizer, bass boost, virtualizer) to the global audio output
+ * mix by use of session 0 is deprecated.
* <p>Creating an AudioEffect object will create the corresponding effect engine in the audio
* framework if no instance of the same effect type exists in the specified audio session.
* If one exists, this instance will be used.
@@ -356,10 +354,9 @@ public class AudioEffect {
* how much the requesting application needs control of effect
* parameters. The normal priority is 0, above normal is a
* positive number, below normal a negative number.
- * @param audioSession system wide unique audio session identifier. If audioSession
- * is not 0, the effect will be attached to the MediaPlayer or
- * AudioTrack in the same audio session. Otherwise, the effect
- * will apply to the output mix.
+ * @param audioSession system wide unique audio session identifier.
+ * The effect will be attached to the MediaPlayer or AudioTrack in
+ * the same audio session.
*
* @throws java.lang.IllegalArgumentException
* @throws java.lang.UnsupportedOperationException
diff --git a/media/java/android/media/audiofx/BassBoost.java b/media/java/android/media/audiofx/BassBoost.java
index ca55f0f..91459ed 100644
--- a/media/java/android/media/audiofx/BassBoost.java
+++ b/media/java/android/media/audiofx/BassBoost.java
@@ -39,9 +39,7 @@ import java.util.StringTokenizer;
* for the SLBassBoostItf interface. Please refer to this specification for more details.
* <p>To attach the BassBoost to a particular AudioTrack or MediaPlayer, specify the audio session
* ID of this AudioTrack or MediaPlayer when constructing the BassBoost.
- * If the audio session ID 0 is specified, the BassBoost applies to the main audio output mix.
- * <p>Creating a BassBoost on the output mix (audio session 0) requires permission
- * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * <p>NOTE: attaching a BassBoost to the global audio output mix by use of session 0 is deprecated.
* <p>See {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions.
* <p>See {@link android.media.audiofx.AudioEffect} class for more details on
* controlling audio effects.
@@ -89,9 +87,8 @@ public class BassBoost extends AudioEffect {
* engine. As the same engine can be shared by several applications, this parameter indicates
* how much the requesting application needs control of effect parameters. The normal priority
* is 0, above normal is a positive number, below normal a negative number.
- * @param audioSession system wide unique audio session identifier. If audioSession
- * is not 0, the BassBoost will be attached to the MediaPlayer or AudioTrack in the
- * same audio session. Otherwise, the BassBoost will apply to the output mix.
+ * @param audioSession system wide unique audio session identifier. The BassBoost will be
+ * attached to the MediaPlayer or AudioTrack in the same audio session.
*
* @throws java.lang.IllegalStateException
* @throws java.lang.IllegalArgumentException
@@ -103,6 +100,10 @@ public class BassBoost extends AudioEffect {
UnsupportedOperationException, RuntimeException {
super(EFFECT_TYPE_BASS_BOOST, EFFECT_TYPE_NULL, priority, audioSession);
+ if (audioSession == 0) {
+ Log.w(TAG, "WARNING: attaching a BassBoost to global output mix is deprecated!");
+ }
+
int[] value = new int[1];
checkStatus(getParameter(PARAM_STRENGTH_SUPPORTED, value));
mStrengthSupported = (value[0] != 0);
diff --git a/media/java/android/media/audiofx/Equalizer.java b/media/java/android/media/audiofx/Equalizer.java
index b3bafa9..7f38955 100644
--- a/media/java/android/media/audiofx/Equalizer.java
+++ b/media/java/android/media/audiofx/Equalizer.java
@@ -39,10 +39,8 @@ import java.util.StringTokenizer;
* mapping those defined by the OpenSL ES 1.0.1 Specification (http://www.khronos.org/opensles/)
* for the SLEqualizerItf interface. Please refer to this specification for more details.
* <p>To attach the Equalizer to a particular AudioTrack or MediaPlayer, specify the audio session
- * ID of this AudioTrack or MediaPlayer when constructing the Equalizer. If the audio session ID 0
- * is specified, the Equalizer applies to the main audio output mix.
- * <p>Creating an Equalizer on the output mix (audio session 0) requires permission
- * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * ID of this AudioTrack or MediaPlayer when constructing the Equalizer.
+ * <p>NOTE: attaching an Equalizer to the global audio output mix by use of session 0 is deprecated.
* <p>See {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions.
* <p>See {@link android.media.audiofx.AudioEffect} class for more details on controlling audio
* effects.
@@ -134,9 +132,8 @@ public class Equalizer extends AudioEffect {
* engine. As the same engine can be shared by several applications, this parameter indicates
* how much the requesting application needs control of effect parameters. The normal priority
* is 0, above normal is a positive number, below normal a negative number.
- * @param audioSession system wide unique audio session identifier. If audioSession
- * is not 0, the Equalizer will be attached to the MediaPlayer or AudioTrack in the
- * same audio session. Otherwise, the Equalizer will apply to the output mix.
+ * @param audioSession system wide unique audio session identifier. The Equalizer will be
+ * attached to the MediaPlayer or AudioTrack in the same audio session.
*
* @throws java.lang.IllegalStateException
* @throws java.lang.IllegalArgumentException
@@ -148,6 +145,10 @@ public class Equalizer extends AudioEffect {
UnsupportedOperationException, RuntimeException {
super(EFFECT_TYPE_EQUALIZER, EFFECT_TYPE_NULL, priority, audioSession);
+ if (audioSession == 0) {
+ Log.w(TAG, "WARNING: attaching an Equalizer to global output mix is deprecated!");
+ }
+
getNumberOfBands();
mNumPresets = (int)getNumberOfPresets();
diff --git a/media/java/android/media/audiofx/Virtualizer.java b/media/java/android/media/audiofx/Virtualizer.java
index a682a45..68a7b88 100644
--- a/media/java/android/media/audiofx/Virtualizer.java
+++ b/media/java/android/media/audiofx/Virtualizer.java
@@ -40,10 +40,9 @@ import java.util.StringTokenizer;
* mapping those defined by the OpenSL ES 1.0.1 Specification (http://www.khronos.org/opensles/)
* for the SLVirtualizerItf interface. Please refer to this specification for more details.
* <p>To attach the Virtualizer to a particular AudioTrack or MediaPlayer, specify the audio session
- * ID of this AudioTrack or MediaPlayer when constructing the Virtualizer. If the audio session ID 0
- * is specified, the Virtualizer applies to the main audio output mix.
- * <p>Creating a Virtualizer on the output mix (audio session 0) requires permission
- * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * ID of this AudioTrack or MediaPlayer when constructing the Virtualizer.
+ * <p>NOTE: attaching a Virtualizer to the global audio output mix by use of session 0 is
+ * deprecated.
* <p>See {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions.
* <p>See {@link android.media.audiofx.AudioEffect} class for more details on controlling
* audio effects.
@@ -90,9 +89,8 @@ public class Virtualizer extends AudioEffect {
* engine. As the same engine can be shared by several applications, this parameter indicates
* how much the requesting application needs control of effect parameters. The normal priority
* is 0, above normal is a positive number, below normal a negative number.
- * @param audioSession system wide unique audio session identifier. If audioSession
- * is not 0, the Virtualizer will be attached to the MediaPlayer or AudioTrack in the
- * same audio session. Otherwise, the Virtualizer will apply to the output mix.
+ * @param audioSession system wide unique audio session identifier. The Virtualizer will
+ * be attached to the MediaPlayer or AudioTrack in the same audio session.
*
* @throws java.lang.IllegalStateException
* @throws java.lang.IllegalArgumentException
@@ -104,6 +102,10 @@ public class Virtualizer extends AudioEffect {
UnsupportedOperationException, RuntimeException {
super(EFFECT_TYPE_VIRTUALIZER, EFFECT_TYPE_NULL, priority, audioSession);
+ if (audioSession == 0) {
+ Log.w(TAG, "WARNING: attaching a Virtualizer to global output mix is deprecated!");
+ }
+
int[] value = new int[1];
checkStatus(getParameter(PARAM_STRENGTH_SUPPORTED, value));
mStrengthSupported = (value[0] != 0);
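A minimal sketch of the pattern the updated audiofx documentation points to: create the effect on a specific player's audio session instead of the deprecated global output mix (session 0). The helper class, method and resId parameter are placeholders.

    import android.content.Context;
    import android.media.MediaPlayer;
    import android.media.audiofx.Equalizer;

    final class EffectSessionExample {
        // Attach the effect to the player's own audio session rather than session 0.
        static Equalizer attachEqualizer(Context context, int resId) {
            MediaPlayer player = MediaPlayer.create(context, resId);
            Equalizer equalizer = new Equalizer(0 /* priority */, player.getAudioSessionId());
            equalizer.setEnabled(true);
            return equalizer;
        }
    }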
diff --git a/media/java/android/media/videoeditor/MediaArtistNativeHelper.java b/media/java/android/media/videoeditor/MediaArtistNativeHelper.java
index 5bfdcdb..8caa04c 100644
--- a/media/java/android/media/videoeditor/MediaArtistNativeHelper.java
+++ b/media/java/android/media/videoeditor/MediaArtistNativeHelper.java
@@ -3781,72 +3781,62 @@ class MediaArtistNativeHelper {
* @param startMs The starting time in ms
* @param endMs The end time in ms
* @param thumbnailCount The number of frames to be extracted
+ * @param indices The indices of thumbnails wanted
+ * @param callback The callback used to pass back the bitmaps
* from startMs to endMs
*
* @return The frames as bitmaps in bitmap array
**/
- Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
- int thumbnailCount) {
- int[] rgb888 = null;
- int thumbnailSize = 0;
- Bitmap tempBitmap = null;
-
+ void getPixelsList(String filename, final int width, final int height,
+ long startMs, long endMs, int thumbnailCount, int[] indices,
+ final MediaItem.GetThumbnailListCallback callback) {
/* Make width and height as even */
final int newWidth = (width + 1) & 0xFFFFFFFE;
final int newHeight = (height + 1) & 0xFFFFFFFE;
- thumbnailSize = newWidth * newHeight * 4;
+ final int thumbnailSize = newWidth * newHeight;
/* Create a temp bitmap for resized thumbnails */
- if ((newWidth != width) || (newHeight != height)) {
- tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
- }
- int i = 0;
- int deltaTime = (int)(endMs - startMs) / thumbnailCount;
- Bitmap[] bitmaps = null;
-
- try {
- // This may result in out of Memory Error
- rgb888 = new int[thumbnailSize * thumbnailCount];
- bitmaps = new Bitmap[thumbnailCount];
- } catch (Throwable e) {
- // Allocating to new size with Fixed count
- try {
- rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
- bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED];
- thumbnailCount = MAX_THUMBNAIL_PERMITTED;
- } catch (Throwable ex) {
- throw new RuntimeException("Memory allocation fails, thumbnail count too large: "
- + thumbnailCount);
- }
- }
- IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
- nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount,
- startMs, endMs);
+ final Bitmap tempBitmap =
+ (newWidth != width || newHeight != height)
+ ? Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888)
+ : null;
+
+ final int[] rgb888 = new int[thumbnailSize];
+ final IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
+ nativeGetPixelsList(filename, rgb888, newWidth, newHeight,
+ thumbnailCount, startMs, endMs, indices,
+ new NativeGetPixelsListCallback() {
+ public void onThumbnail(int index) {
+ Bitmap bitmap = Bitmap.createBitmap(
+ width, height, Bitmap.Config.ARGB_8888);
+ tmpBuffer.put(rgb888, 0, thumbnailSize);
+ tmpBuffer.rewind();
+
+ if ((newWidth == width) && (newHeight == height)) {
+ bitmap.copyPixelsFromBuffer(tmpBuffer);
+ } else {
+ /* Copy the out rgb buffer to temp bitmap */
+ tempBitmap.copyPixelsFromBuffer(tmpBuffer);
- for (; i < thumbnailCount; i++) {
- bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
- tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
- tmpBuffer.rewind();
+ /* Create a canvas to resize */
+ final Canvas canvas = new Canvas(bitmap);
+ canvas.drawBitmap(tempBitmap,
+ new Rect(0, 0, newWidth, newHeight),
+ new Rect(0, 0, width, height), sResizePaint);
- if ((newWidth == width) && (newHeight == height)) {
- bitmaps[i].copyPixelsFromBuffer(tmpBuffer);
- } else {
- /* Copy the out rgb buffer to temp bitmap */
- tempBitmap.copyPixelsFromBuffer(tmpBuffer);
-
- /* Create a canvas to resize */
- final Canvas canvas = new Canvas(bitmaps[i]);
- canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
- new Rect(0, 0, width, height), sResizePaint);
- canvas.setBitmap(null);
+ canvas.setBitmap(null);
+ }
+ callback.onThumbnail(bitmap, index);
}
- }
+ });
if (tempBitmap != null) {
tempBitmap.recycle();
}
+ }
- return bitmaps;
+ interface NativeGetPixelsListCallback {
+ public void onThumbnail(int index);
}
/**
@@ -3957,8 +3947,9 @@ class MediaArtistNativeHelper {
private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
long timeMS);
- private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
- int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
+ private native int nativeGetPixelsList(String fileName, int[] pixelArray,
+ int width, int height, int nosofTN, long startTimeMs, long endTimeMs,
+ int[] indices, NativeGetPixelsListCallback callback);
/**
* Releases the JNI and cleans up the core native module.. Should be called
diff --git a/media/java/android/media/videoeditor/MediaImageItem.java b/media/java/android/media/videoeditor/MediaImageItem.java
index f0cc1fe..4ca6fad 100755
--- a/media/java/android/media/videoeditor/MediaImageItem.java
+++ b/media/java/android/media/videoeditor/MediaImageItem.java
@@ -616,17 +616,18 @@ public class MediaImageItem extends MediaItem {
* {@inheritDoc}
*/
@Override
- public Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
- int thumbnailCount) throws IOException {
+ public void getThumbnailList(int width, int height,
+ long startMs, long endMs,
+ int thumbnailCount,
+ int[] indices,
+ GetThumbnailListCallback callback)
+ throws IOException {
//KenBurns was not applied on this.
if (getGeneratedImageClip() == null) {
final Bitmap thumbnail = scaleImage(mFilename, width, height);
- final Bitmap[] thumbnailArray = new Bitmap[thumbnailCount];
- for (int i = 0; i < thumbnailCount; i++) {
- thumbnailArray[i] = thumbnail;
+ for (int i = 0; i < indices.length; i++) {
+ callback.onThumbnail(thumbnail, i);
}
-
- return thumbnailArray;
} else {
if (startMs > endMs) {
throw new IllegalArgumentException("Start time is greater than end time");
@@ -636,15 +637,8 @@ public class MediaImageItem extends MediaItem {
throw new IllegalArgumentException("End time is greater than file duration");
}
- if (startMs == endMs) {
- Bitmap[] bitmap = new Bitmap[1];
- bitmap[0] = mMANativeHelper.getPixels(getGeneratedImageClip(),
- width, height,startMs);
- return bitmap;
- }
-
- return mMANativeHelper.getPixelsList(getGeneratedImageClip(), width,
- height,startMs,endMs,thumbnailCount);
+ mMANativeHelper.getPixelsList(getGeneratedImageClip(), width,
+ height, startMs, endMs, thumbnailCount, indices, callback);
}
}
diff --git a/media/java/android/media/videoeditor/MediaItem.java b/media/java/android/media/videoeditor/MediaItem.java
index 8c4841f..4e9ea75 100755
--- a/media/java/android/media/videoeditor/MediaItem.java
+++ b/media/java/android/media/videoeditor/MediaItem.java
@@ -564,15 +564,41 @@ public abstract class MediaItem {
* @param startMs The start of time range in milliseconds
* @param endMs The end of the time range in milliseconds
* @param thumbnailCount The thumbnail count
- *
- * @return The array of Bitmaps
+ * @param indices The indices of the thumbnails wanted
+ * @param callback The callback used to pass back the bitmaps
*
* @throws IOException if a file error occurs
*/
- public abstract Bitmap[] getThumbnailList(int width, int height,
- long startMs, long endMs,
- int thumbnailCount)
- throws IOException;
+ public abstract void getThumbnailList(int width, int height,
+ long startMs, long endMs,
+ int thumbnailCount,
+ int[] indices,
+ GetThumbnailListCallback callback)
+ throws IOException;
+
+ public interface GetThumbnailListCallback {
+ public void onThumbnail(Bitmap bitmap, int index);
+ }
+
+ // This is for backward compatibility and is only used in tests.
+ public Bitmap[] getThumbnailList(int width, int height,
+ long startMs, long endMs,
+ int thumbnailCount)
+ throws IOException {
+ final Bitmap[] bitmaps = new Bitmap[thumbnailCount];
+ int[] indices = new int[thumbnailCount];
+ for (int i = 0; i < thumbnailCount; i++) {
+ indices[i] = i;
+ }
+ getThumbnailList(width, height, startMs, endMs,
+ thumbnailCount, indices, new GetThumbnailListCallback() {
+ public void onThumbnail(Bitmap bitmap, int index) {
+ bitmaps[index] = bitmap;
+ }
+ });
+
+ return bitmaps;
+ }
/*
* {@inheritDoc}
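A hypothetical caller of the new callback-based API: it requests only a subset of the thumbnail slots and handles each bitmap as it is delivered, instead of waiting for (and holding) the full Bitmap[] the old signature returned. The helper class and method are illustrative.

    import android.graphics.Bitmap;
    import android.media.videoeditor.MediaItem;
    import java.io.IOException;

    final class ThumbnailRequestExample {
        // Ask for the first and last of a count-slot thumbnail strip over [0, durationMs].
        static void loadFirstAndLast(MediaItem item, int width, int height,
                long durationMs, int count) throws IOException {
            int[] wanted = new int[] { 0, count - 1 };
            item.getThumbnailList(width, height, 0, durationMs, count, wanted,
                    new MediaItem.GetThumbnailListCallback() {
                        public void onThumbnail(Bitmap bitmap, int index) {
                            // display or cache the thumbnail decoded for this index
                        }
                    });
        }
    }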
diff --git a/media/java/android/media/videoeditor/MediaVideoItem.java b/media/java/android/media/videoeditor/MediaVideoItem.java
index 6248651..0ac354b 100755
--- a/media/java/android/media/videoeditor/MediaVideoItem.java
+++ b/media/java/android/media/videoeditor/MediaVideoItem.java
@@ -293,8 +293,12 @@ public class MediaVideoItem extends MediaItem {
* {@inheritDoc}
*/
@Override
- public Bitmap[] getThumbnailList(int width, int height, long startMs,
- long endMs, int thumbnailCount) throws IOException {
+ public void getThumbnailList(int width, int height,
+ long startMs, long endMs,
+ int thumbnailCount,
+ int[] indices,
+ GetThumbnailListCallback callback)
+ throws IOException {
if (startMs > endMs) {
throw new IllegalArgumentException("Start time is greater than end time");
}
@@ -307,14 +311,8 @@ public class MediaVideoItem extends MediaItem {
throw new IllegalArgumentException("Invalid dimension");
}
- if (startMs == endMs) {
- final Bitmap[] bitmap = new Bitmap[1];
- bitmap[0] = mMANativeHelper.getPixels(super.getFilename(), width, height,startMs);
- return bitmap;
- }
-
- return mMANativeHelper.getPixelsList(super.getFilename(), width,
- height,startMs,endMs,thumbnailCount);
+ mMANativeHelper.getPixelsList(super.getFilename(), width,
+ height, startMs, endMs, thumbnailCount, indices, callback);
}
/*
diff --git a/media/jni/mediaeditor/VideoBrowserInternal.h b/media/jni/mediaeditor/VideoBrowserInternal.h
index 3cfb6b9..f4eaab8 100755
--- a/media/jni/mediaeditor/VideoBrowserInternal.h
+++ b/media/jni/mediaeditor/VideoBrowserInternal.h
@@ -26,9 +26,6 @@
#define VIDEO_BROWSER_BGR565
-
-#define VIDEO_BROWSER_PREDECODE_TIME 2000 /* In miliseconds */
-
/*---------------------------- MACROS ----------------------------*/
#define CHECK_PTR(fct, p, err, errValue) \
{ \
diff --git a/media/jni/mediaeditor/VideoBrowserMain.c b/media/jni/mediaeditor/VideoBrowserMain.c
index 2de55e3..c6c6000 100755
--- a/media/jni/mediaeditor/VideoBrowserMain.c
+++ b/media/jni/mediaeditor/VideoBrowserMain.c
@@ -447,13 +447,9 @@ M4OSA_ERR videoBrowserPrepareFrame(M4OSA_Context pContext, M4OSA_UInt32* pTime,
VideoBrowserContext* pC = (VideoBrowserContext*)pContext;
M4OSA_ERR err = M4NO_ERROR;
M4OSA_UInt32 targetTime = 0;
- M4OSA_UInt32 jumpTime = 0;
M4_MediaTime timeMS = 0;
- M4OSA_Int32 rapTime = 0;
- M4OSA_Bool isBackward = M4OSA_FALSE;
M4OSA_Bool bJumpNeeded = M4OSA_FALSE;
-
/*--- Sanity checks ---*/
CHECK_PTR(videoBrowserPrepareFrame, pContext, err, M4ERR_PARAMETER);
CHECK_PTR(videoBrowserPrepareFrame, pTime, err, M4ERR_PARAMETER);
@@ -472,16 +468,11 @@ M4OSA_ERR videoBrowserPrepareFrame(M4OSA_Context pContext, M4OSA_UInt32* pTime,
goto videoBrowserPrepareFrame_cleanUp;
}
- /*--- Check the duration ---*/
- /*--- If we jump backward, we need to jump ---*/
- if (targetTime < pC->m_currentCTS)
- {
- isBackward = M4OSA_TRUE;
- bJumpNeeded = M4OSA_TRUE;
- }
- /*--- If we jumpt to a time greater than "currentTime" + "predecodeTime"
- we need to jump ---*/
- else if (targetTime > (pC->m_currentCTS + VIDEO_BROWSER_PREDECODE_TIME))
+ // If the target time is before the current position, or more than 85 ms
+ // (~ 2 frames) ahead of it, we need to jump.
+ if (pC->m_currentCTS == 0 ||
+ targetTime < pC->m_currentCTS ||
+ targetTime > (pC->m_currentCTS + 85))
{
bJumpNeeded = M4OSA_TRUE;
}
diff --git a/media/jni/mediaeditor/VideoEditorMain.cpp b/media/jni/mediaeditor/VideoEditorMain.cpp
index 14972a2..7d0f56f 100755
--- a/media/jni/mediaeditor/VideoEditorMain.cpp
+++ b/media/jni/mediaeditor/VideoEditorMain.cpp
@@ -182,10 +182,11 @@ static int videoEditor_getPixelsList(
jintArray pixelArray,
M4OSA_UInt32 width,
M4OSA_UInt32 height,
- M4OSA_UInt32 deltatimeMS,
M4OSA_UInt32 noOfThumbnails,
- M4OSA_UInt32 startTime,
- M4OSA_UInt32 endTime);
+ jlong startTime,
+ jlong endTime,
+ jintArray indexArray,
+ jobject callback);
static void
videoEditor_startPreview(
@@ -288,7 +289,7 @@ static JNINativeMethod gManualEditMethods[] = {
(void *)videoEditor_release },
{"nativeGetPixels", "(Ljava/lang/String;[IIIJ)I",
(void*)videoEditor_getPixels },
- {"nativeGetPixelsList", "(Ljava/lang/String;[IIIIIJJ)I",
+ {"nativeGetPixelsList", "(Ljava/lang/String;[IIIIJJ[ILandroid/media/videoeditor/MediaArtistNativeHelper$NativeGetPixelsListCallback;)I",
(void*)videoEditor_getPixelsList },
{"getMediaProperties",
"(Ljava/lang/String;)Landroid/media/videoeditor/MediaArtistNativeHelper$Properties;",
@@ -2150,75 +2151,72 @@ static int videoEditor_getPixels(
}
static int videoEditor_getPixelsList(
- JNIEnv* env,
- jobject thiz,
- jstring path,
- jintArray pixelArray,
- M4OSA_UInt32 width,
- M4OSA_UInt32 height,
- M4OSA_UInt32 deltatimeMS,
+ JNIEnv* env,
+ jobject thiz,
+ jstring path,
+ jintArray pixelArray,
+ M4OSA_UInt32 width,
+ M4OSA_UInt32 height,
M4OSA_UInt32 noOfThumbnails,
- M4OSA_UInt32 startTime,
- M4OSA_UInt32 endTime)
+ jlong startTime,
+ jlong endTime,
+ jintArray indexArray,
+ jobject callback)
{
- M4OSA_ERR err;
+ M4OSA_ERR err = M4NO_ERROR;
M4OSA_Context mContext = M4OSA_NULL;
- jint* m_dst32;
- M4OSA_UInt32 timeMS = startTime;
- int arrayOffset = 0;
-
-
-
- // Add a text marker (the condition must always be true).
- ADD_TEXT_MARKER_FUN(NULL != env)
const char *pString = env->GetStringUTFChars(path, NULL);
if (pString == M4OSA_NULL) {
- if (env != NULL) {
- jniThrowException(env, "java/lang/RuntimeException", "Input string null");
- }
+ jniThrowException(env, "java/lang/RuntimeException", "Input string null");
return M4ERR_ALLOC;
}
err = ThumbnailOpen(&mContext,(const M4OSA_Char*)pString, M4OSA_FALSE);
if (err != M4NO_ERROR || mContext == M4OSA_NULL) {
- if (env != NULL) {
- jniThrowException(env, "java/lang/RuntimeException", "ThumbnailOpen failed");
- }
+ jniThrowException(env, "java/lang/RuntimeException", "ThumbnailOpen failed");
if (pString != NULL) {
env->ReleaseStringUTFChars(path, pString);
}
return err;
}
- m_dst32 = env->GetIntArrayElements(pixelArray, NULL);
+ jlong duration = (endTime - startTime);
+ M4OSA_UInt32 tolerance = duration / (2 * noOfThumbnails);
+ jint* m_dst32 = env->GetIntArrayElements(pixelArray, NULL);
+ jint* indices = env->GetIntArrayElements(indexArray, NULL);
+ jsize len = env->GetArrayLength(indexArray);
- M4OSA_UInt32 tolerance = deltatimeMS / 2;
- do {
- err = ThumbnailGetPixels32(mContext, ((M4OSA_Int32 *)m_dst32 + arrayOffset),
- width,height,&timeMS, tolerance);
- if (err != M4NO_ERROR ) {
- if (env != NULL) {
- jniThrowException(env, "java/lang/RuntimeException",\
- "ThumbnailGetPixels32 failed");
- }
- return err;
+ jclass cls = env->GetObjectClass(callback);
+ jmethodID mid = env->GetMethodID(cls, "onThumbnail", "(I)V");
+
+ for (int i = 0; i < len; i++) {
+ int k = indices[i];
+ M4OSA_UInt32 timeMS = startTime;
+ timeMS += (2 * k + 1) * duration / (2 * noOfThumbnails);
+ err = ThumbnailGetPixels32(mContext, ((M4OSA_Int32 *)m_dst32),
+ width, height, &timeMS, tolerance);
+ if (err != M4NO_ERROR) {
+ break;
}
- timeMS += deltatimeMS;
- arrayOffset += (width * height * 4);
- noOfThumbnails--;
- } while(noOfThumbnails > 0);
+ env->CallVoidMethod(callback, mid, (jint)k);
+ }
env->ReleaseIntArrayElements(pixelArray, m_dst32, 0);
+ env->ReleaseIntArrayElements(indexArray, indices, 0);
ThumbnailClose(mContext);
if (pString != NULL) {
env->ReleaseStringUTFChars(path, pString);
}
- return err;
+ if (err != M4NO_ERROR) {
+ jniThrowException(env, "java/lang/RuntimeException",\
+ "ThumbnailGetPixels32 failed");
+ }
+ return err;
}
static M4OSA_ERR
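The loop above samples thumbnail k at the midpoint of the k-th of noOfThumbnails equal sub-intervals of [startTime, endTime], with a seek tolerance of half a sub-interval. The same arithmetic, sketched in Java for consistency with the other examples (names are illustrative):

    final class ThumbnailTiming {
        // Midpoint of the k-th equal sub-interval of [startMs, endMs].
        static long thumbnailTimeMs(long startMs, long endMs, int noOfThumbnails, int k) {
            long duration = endMs - startMs;
            return startMs + (2L * k + 1) * duration / (2L * noOfThumbnails);
        }

        // Seek tolerance: half a sub-interval.
        static long toleranceMs(long startMs, long endMs, int noOfThumbnails) {
            return (endMs - startMs) / (2L * noOfThumbnails);
        }
    }
    // For startMs = 0, endMs = 10000 and noOfThumbnails = 5 this yields 1000, 3000, 5000,
    // 7000 and 9000 ms, each with a 1000 ms tolerance.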
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index b06f20d..7fb141a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -340,6 +340,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
finishFlushIfPossible();
+ } else if (what == ACodec::kWhatError) {
+ LOGE("Received error from %s decoder, aborting playback.",
+ audio ? "audio" : "video");
+
+ mRenderer->queueEOS(audio, UNKNOWN_ERROR);
} else {
CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
@@ -358,13 +363,24 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
+ int32_t finalResult;
+ CHECK(msg->findInt32("finalResult", &finalResult));
+
if (audio) {
mAudioEOS = true;
} else {
mVideoEOS = true;
}
- LOGV("reached %s EOS", audio ? "audio" : "video");
+ if (finalResult == ERROR_END_OF_STREAM) {
+ LOGV("reached %s EOS", audio ? "audio" : "video");
+ } else {
+ LOGE("%s track encountered an error (0x%08x)",
+ audio ? "audio" : "video", finalResult);
+
+ notifyListener(
+ MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
+ }
if ((mAudioEOS || mAudioDecoder == NULL)
&& (mVideoEOS || mVideoDecoder == NULL)) {
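On the application side this MEDIA_ERROR notification arrives through MediaPlayer.OnErrorListener, with the track's final status in the extra argument. A hedged sketch of a client-side handler (the class and TAG are assumptions):

    import android.media.MediaPlayer;
    import android.util.Log;

    final class PlaybackErrorHandling {
        private static final String TAG = "PlaybackErrorHandling";

        static void install(MediaPlayer player) {
            player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
                public boolean onError(MediaPlayer mp, int what, int extra) {
                    // what is MEDIA_ERROR_UNKNOWN; extra carries the decoder's final status.
                    Log.e(TAG, "playback error: what=" + what
                            + " extra=0x" + Integer.toHexString(extra));
                    mp.reset();
                    return true; // handled; suppresses the onCompletion callback
                }
            });
        }
    }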
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 828e008..35ed43f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -200,19 +200,6 @@ void NuPlayer::Renderer::signalAudioSinkChanged() {
void NuPlayer::Renderer::onDrainAudioQueue() {
for (;;) {
- uint32_t numFramesPlayed;
- CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
-
- ssize_t numFramesAvailableToWrite =
- mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
-
- size_t numBytesAvailableToWrite =
- numFramesAvailableToWrite * mAudioSink->frameSize();
-
- if (numBytesAvailableToWrite == 0) {
- break;
- }
-
if (mAudioQueue.empty()) {
break;
}
@@ -222,13 +209,26 @@ void NuPlayer::Renderer::onDrainAudioQueue() {
if (entry->mBuffer == NULL) {
// EOS
- notifyEOS(true /* audio */);
+ notifyEOS(true /* audio */, entry->mFinalResult);
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
return;
}
+ uint32_t numFramesPlayed;
+ CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+ ssize_t numFramesAvailableToWrite =
+ mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
+
+ size_t numBytesAvailableToWrite =
+ numFramesAvailableToWrite * mAudioSink->frameSize();
+
+ if (numBytesAvailableToWrite == 0) {
+ break;
+ }
+
if (entry->mOffset == 0) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
@@ -330,7 +330,7 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
if (entry->mBuffer == NULL) {
// EOS
- notifyEOS(false /* audio */);
+ notifyEOS(false /* audio */, entry->mFinalResult);
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
@@ -352,10 +352,11 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
notifyPosition();
}
-void NuPlayer::Renderer::notifyEOS(bool audio) {
+void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatEOS);
notify->setInt32("audio", static_cast<int32_t>(audio));
+ notify->setInt32("finalResult", finalResult);
notify->post();
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 703e971..2713031 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -111,7 +111,7 @@ private:
void onPause();
void onResume();
- void notifyEOS(bool audio);
+ void notifyEOS(bool audio, status_t finalResult);
void notifyFlushComplete(bool audio);
void notifyPosition();
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 174ec92..5d91f6a 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -285,21 +285,6 @@ private:
////////////////////////////////////////////////////////////////////////////////
-struct ACodec::ErrorState : public ACodec::BaseState {
- ErrorState(ACodec *codec);
-
-protected:
- virtual bool onMessageReceived(const sp<AMessage> &msg);
- virtual void stateEntered();
-
- virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(ErrorState);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
struct ACodec::FlushingState : public ACodec::BaseState {
FlushingState(ACodec *codec);
@@ -335,7 +320,6 @@ ACodec::ACodec()
mExecutingToIdleState = new ExecutingToIdleState(this);
mIdleToLoadedState = new IdleToLoadedState(this);
- mErrorState = new ErrorState(this);
mFlushingState = new FlushingState(this);
mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
@@ -594,7 +578,10 @@ status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
ANativeWindowBuffer *buf;
- CHECK_EQ(mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf), 0);
+ if (mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf) != 0) {
+ LOGE("dequeueBuffer failed.");
+ return NULL;
+ }
for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
BufferInfo *info =
@@ -1263,10 +1250,12 @@ bool ACodec::BaseState::onOMXEvent(
return false;
}
- LOGE("[%s] ERROR(0x%08lx, 0x%08lx)",
- mCodec->mComponentName.c_str(), data1, data2);
+ LOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1);
- mCodec->changeState(mCodec->mErrorState);
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatError);
+ notify->setInt32("omx-error", data1);
+ notify->post();
return true;
}
@@ -1595,13 +1584,15 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
info = mCodec->dequeueBufferFromNativeWindow();
}
- LOGV("[%s] calling fillBuffer %p",
- mCodec->mComponentName.c_str(), info->mBufferID);
+ if (info != NULL) {
+ LOGV("[%s] calling fillBuffer %p",
+ mCodec->mComponentName.c_str(), info->mBufferID);
- CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
- (status_t)OK);
+ CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
+ (status_t)OK);
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ }
}
break;
}
@@ -1642,6 +1633,7 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
notify->post();
handled = true;
+ break;
}
case ACodec::kWhatFlush:
@@ -1651,6 +1643,7 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
notify->post();
handled = true;
+ break;
}
default:
@@ -1696,7 +1689,16 @@ void ACodec::UninitializedState::onSetup(
node = NULL;
}
- CHECK(node != NULL);
+ if (node == NULL) {
+ LOGE("Unable to instantiate a decoder for type '%s'.", mime.c_str());
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatError);
+ notify->setInt32("omx-error", OMX_ErrorComponentNotFound);
+ notify->post();
+
+ return;
+ }
sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
observer->setNotificationMessage(notify);
@@ -2236,26 +2238,6 @@ bool ACodec::IdleToLoadedState::onOMXEvent(
////////////////////////////////////////////////////////////////////////////////
-ACodec::ErrorState::ErrorState(ACodec *codec)
- : BaseState(codec) {
-}
-
-bool ACodec::ErrorState::onMessageReceived(const sp<AMessage> &msg) {
- return BaseState::onMessageReceived(msg);
-}
-
-void ACodec::ErrorState::stateEntered() {
- LOGV("[%s] Now in ErrorState", mCodec->mComponentName.c_str());
-}
-
-bool ACodec::ErrorState::onOMXEvent(
- OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
- LOGV("EVENT(%d, 0x%08lx, 0x%08lx)", event, data1, data2);
- return true;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
ACodec::FlushingState::FlushingState(ACodec *codec)
: BaseState(codec) {
}
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index b9e4f9f..0b1a2af 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -131,11 +131,9 @@ LOCAL_SHARED_LIBRARIES += \
libdl \
LOCAL_STATIC_LIBRARIES += \
- libstagefright_chromium_http \
- libwebcore \
- libchromium_net \
+ libstagefright_chromium_http
-LOCAL_SHARED_LIBRARIES += libstlport
+LOCAL_SHARED_LIBRARIES += libstlport libchromium_net
include external/stlport/libstlport.mk
LOCAL_CPPFLAGS += -DCHROMIUM_AVAILABLE=1
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 0ea880b..99242ab 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -574,6 +574,8 @@ void AwesomePlayer::reset_l() {
mStats.mTracks.clear();
}
+ mWatchForAudioSeekComplete = false;
+ mWatchForAudioEOS = false;
}
void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java
index 369a067..e5ecd5c 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java
@@ -16,8 +16,7 @@
package com.android.mediaframeworktest;
-import android.media.EncoderCapabilities.AudioEncoderCap;
-import android.media.EncoderCapabilities.VideoEncoderCap;
+import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.test.InstrumentationTestRunner;
@@ -29,20 +28,21 @@ import junit.framework.TestSuite;
public class MediaRecorderStressTestRunner extends InstrumentationTestRunner {
- public static List<VideoEncoderCap> videoEncoders = MediaProfileReader.getVideoEncoders();
- public static List<AudioEncoderCap> audioEncoders = MediaProfileReader.getAudioEncoders();
-
- //Get the first capability as the default
- public static VideoEncoderCap videoEncoder = videoEncoders.get(0);
- public static AudioEncoderCap audioEncoder = audioEncoders.get(0);
+ // The MediaRecorder stress test sets one of the cameras as the video source. As
+ // a result, we should make sure that the encoding parameters used as input to
+ // the test are supported by the corresponding camera.
+ public static int mCameraId = 0;
+ public static int mProfileQuality = CamcorderProfile.QUALITY_HIGH;
+ public static CamcorderProfile profile =
+ CamcorderProfile.get(mCameraId, mProfileQuality);
public static int mIterations = 100;
- public static int mVideoEncoder = videoEncoder.mCodec;
- public static int mAudioEncdoer = audioEncoder.mCodec;
- public static int mFrameRate = videoEncoder.mMaxFrameRate;
- public static int mVideoWidth = videoEncoder.mMaxFrameWidth;
- public static int mVideoHeight = videoEncoder.mMaxFrameHeight;
- public static int mBitRate = audioEncoder.mMaxBitRate;
+ public static int mVideoEncoder = profile.videoCodec;
+ public static int mAudioEncdoer = profile.audioCodec;
+ public static int mFrameRate = profile.videoFrameRate;
+ public static int mVideoWidth = profile.videoFrameWidth;
+ public static int mVideoHeight = profile.videoFrameHeight;
+ public static int mBitRate = profile.videoBitRate;
public static boolean mRemoveVideo = true;
public static int mDuration = 10000;
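For context, a sketch of how profile-derived values like these are typically applied to a MediaRecorder; the helper class is an assumption, not part of this change.

    import android.media.CamcorderProfile;
    import android.media.MediaRecorder;

    final class RecorderProfileExample {
        // Configure a recorder from the camera's QUALITY_HIGH profile, so the encoding
        // parameters are guaranteed to be supported by that camera.
        static MediaRecorder configureRecorder(int cameraId) {
            CamcorderProfile profile =
                    CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
            MediaRecorder recorder = new MediaRecorder();
            recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
            recorder.setProfile(profile); // applies codecs, frame rate, size and bit rates
            return recorder;
        }
    }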