Diffstat (limited to 'media')
-rw-r--r--  media/java/android/media/AudioService.java | 63
-rw-r--r--  media/java/android/media/CamcorderProfile.java | 2
-rw-r--r--  media/java/android/media/MediaFile.java | 11
-rw-r--r--  media/java/android/media/MediaInserter.java | 74
-rw-r--r--  media/java/android/media/MediaMetadataRetriever.java | 7
-rw-r--r--  media/java/android/media/MediaPlayer.java | 22
-rw-r--r--  media/java/android/media/MediaRecorder.java | 17
-rw-r--r--  media/java/android/media/MediaScanner.java | 62
-rwxr-xr-x  media/java/android/media/videoeditor/MediaImageItem.java | 27
-rw-r--r--  media/jni/android_media_MediaScanner.cpp | 64
-rw-r--r--  media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp | 71
-rw-r--r--  media/libeffects/visualizer/EffectVisualizer.cpp | 28
-rw-r--r--  media/libmedia/IMediaPlayer.cpp | 17
-rw-r--r--  media/libmedia/MediaProfiles.cpp | 10
-rw-r--r--  media/libmedia/mediaplayer.cpp | 8
-rw-r--r--  media/libmediaplayerservice/Android.mk | 2
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.cpp | 12
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.h | 1
-rw-r--r--  media/libmediaplayerservice/MidiFile.h | 1
-rw-r--r--  media/libmediaplayerservice/StagefrightPlayer.cpp | 6
-rw-r--r--  media/libmediaplayerservice/StagefrightPlayer.h | 1
-rw-r--r--  media/libmediaplayerservice/TestPlayerStub.h | 3
-rw-r--r--  media/libmediaplayerservice/nuplayer/Android.mk | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 25
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.h | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 23
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.h | 3
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 4
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerSource.h | 1
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 354
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.h | 109
-rw-r--r--  media/libstagefright/ACodec.cpp | 4
-rw-r--r--  media/libstagefright/Android.mk | 2
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 120
-rw-r--r--  media/libstagefright/CameraSourceTimeLapse.cpp | 6
-rw-r--r--  media/libstagefright/DataSource.cpp | 2
-rw-r--r--  media/libstagefright/MP3Extractor.cpp | 19
-rw-r--r--  media/libstagefright/MPEG4Extractor.cpp | 35
-rw-r--r--  media/libstagefright/MediaDefs.cpp | 3
-rw-r--r--  media/libstagefright/MediaExtractor.cpp | 3
-rwxr-xr-x  media/libstagefright/OMXCodec.cpp | 11
-rw-r--r--  media/libstagefright/StagefrightMediaScanner.cpp | 2
-rw-r--r--  media/libstagefright/StagefrightMetadataRetriever.cpp | 1
-rw-r--r--  media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp | 6
-rw-r--r--  media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp | 6
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 7
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 17
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.h | 2
-rw-r--r--  media/libstagefright/include/ARTSPController.h | 97
-rw-r--r--  media/libstagefright/include/AwesomePlayer.h | 14
-rw-r--r--  media/libstagefright/include/MPEG2PSExtractor.h | 80
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.h | 9
-rw-r--r--  media/libstagefright/mpeg2ts/Android.mk | 1
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.cpp | 21
-rw-r--r--  media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp | 715
-rw-r--r--  media/libstagefright/rtsp/APacketSource.cpp | 163
-rw-r--r--  media/libstagefright/rtsp/APacketSource.h | 42
-rw-r--r--  media/libstagefright/rtsp/ARTPConnection.cpp | 62
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.cpp | 134
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.h | 2
-rw-r--r--  media/libstagefright/rtsp/ARTSPController.cpp | 214
-rw-r--r--  media/libstagefright/rtsp/Android.mk | 1
-rw-r--r--  media/libstagefright/rtsp/MyHandler.h | 278
-rw-r--r--  media/tests/MediaFrameworkTest/Android.mk | 2
-rwxr-xr-x  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java | 5
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaBassBoostTest.java | 85
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaEqualizerTest.java | 79
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaVirtualizerTest.java | 89
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaInserterTest.java | 246
-rw-r--r--  media/tests/players/invoke_mock_media_player.cpp | 1
70 files changed, 2383 insertions, 1235 deletions
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 2f32bd8..5e7a7eb 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -521,6 +521,9 @@ public class AudioService extends IAudioService.Stub {
ensureValidDirection(direction);
ensureValidStreamType(streamType);
+ // use stream type alias here so that streams with same alias have the same behavior,
+ // including with regard to silent mode control (e.g the use of STREAM_RING below and in
+ // checkForRingerModeChange() in place of STREAM_RING or STREAM_NOTIFICATION)
int streamTypeAlias = STREAM_VOLUME_ALIAS[streamType];
VolumeStreamState streamState = mStreamStates[streamTypeAlias];
final int oldIndex = (streamState.muteCount() != 0) ? streamState.mLastAudibleIndex : streamState.mIndex;
@@ -529,9 +532,8 @@ public class AudioService extends IAudioService.Stub {
// If either the client forces allowing ringer modes for this adjustment,
// or the stream type is one that is affected by ringer modes
if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
- (!mVoiceCapable && streamType != AudioSystem.STREAM_VOICE_CALL &&
- streamType != AudioSystem.STREAM_BLUETOOTH_SCO) ||
- (mVoiceCapable && streamTypeAlias == AudioSystem.STREAM_RING)) {
+ streamTypeAlias == AudioSystem.STREAM_RING ||
+ (!mVoiceCapable && streamTypeAlias == AudioSystem.STREAM_MUSIC)) {
// do not vibrate if already in vibrate mode
if (mRingerMode == AudioManager.RINGER_MODE_VIBRATE) {
flags &= ~AudioManager.FLAG_VIBRATE;
@@ -545,10 +547,19 @@ public class AudioService extends IAudioService.Stub {
int index;
if (streamState.muteCount() != 0) {
if (adjustVolume) {
- streamState.adjustLastAudibleIndex(direction);
- // Post a persist volume msg
- sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, streamType,
- SENDMSG_REPLACE, 0, 1, streamState, PERSIST_DELAY);
+ // adjust volume on all stream types sharing the same alias otherwise a query
+ // on last audible index for an alias would not give the correct value
+ int numStreamTypes = AudioSystem.getNumStreamTypes();
+ for (int i = numStreamTypes - 1; i >= 0; i--) {
+ if (STREAM_VOLUME_ALIAS[i] == streamTypeAlias) {
+ VolumeStreamState s = mStreamStates[i];
+
+ s.adjustLastAudibleIndex(direction);
+ // Post a persist volume msg
+ sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, i,
+ SENDMSG_REPLACE, 0, 1, s, PERSIST_DELAY);
+ }
+ }
}
index = streamState.mLastAudibleIndex;
} else {
@@ -2683,10 +2694,21 @@ public class AudioService extends IAudioService.Stub {
}
public void unlinkToDeath() {
- if (mSourceRef != null && mHandler != null) {
- mSourceRef.unlinkToDeath(mHandler, 0);
+ try {
+ if (mSourceRef != null && mHandler != null) {
+ mSourceRef.unlinkToDeath(mHandler, 0);
+ mHandler = null;
+ }
+ } catch (java.util.NoSuchElementException e) {
+ Log.e(TAG, "Encountered " + e + " in FocusStackEntry.unlinkToDeath()");
}
}
+
+ @Override
+ protected void finalize() throws Throwable {
+ unlinkToDeath(); // unlink exception handled inside method
+ super.finalize();
+ }
}
private Stack<FocusStackEntry> mFocusStack = new Stack<FocusStackEntry>();
@@ -2717,7 +2739,7 @@ public class AudioService extends IAudioService.Stub {
* focus, notify the next item in the stack it gained focus.
*/
private void removeFocusStackEntry(String clientToRemove, boolean signal) {
- // is the current top of the focus stack abandoning focus? (because of death or request)
+ // is the current top of the focus stack abandoning focus? (because of request, not death)
if (!mFocusStack.empty() && mFocusStack.peek().mClientId.equals(clientToRemove))
{
//Log.i(TAG, " removeFocusStackEntry() removing top of stack");
@@ -2750,7 +2772,7 @@ public class AudioService extends IAudioService.Stub {
/**
* Helper function:
* Called synchronized on mAudioFocusLock
- * Remove focus listeners from the focus stack for a particular client.
+ * Remove focus listeners from the focus stack for a particular client when it has died.
*/
private void removeFocusStackEntryForClient(IBinder cb) {
// is the owner of the audio focus part of the client to remove?
@@ -2763,6 +2785,7 @@ public class AudioService extends IAudioService.Stub {
Log.i(TAG, " AudioFocus abandonAudioFocus(): removing entry for "
+ fse.mClientId);
stackIterator.remove();
+ // the client just died, no need to unlink to its death
}
}
if (isTopOfStackForClientToRemove) {
@@ -2847,11 +2870,15 @@ public class AudioService extends IAudioService.Stub {
// if focus is already owned by this client and the reason for acquiring the focus
// hasn't changed, don't do anything
if (mFocusStack.peek().mFocusChangeType == focusChangeHint) {
+ // unlink death handler so it can be gc'ed.
+ // linkToDeath() creates a JNI global reference preventing collection.
+ cb.unlinkToDeath(afdh, 0);
return AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
}
// the reason for the audio focus request has changed: remove the current top of
// stack and respond as if we had a new focus owner
- mFocusStack.pop();
+ FocusStackEntry fse = mFocusStack.pop();
+ fse.unlinkToDeath();
}
// notify current top of stack it is losing focus
@@ -3039,6 +3066,7 @@ public class AudioService extends IAudioService.Stub {
if ((mRcClientDeathHandler != null) && (mRcClientDeathHandler.mCb != null)) {
try {
mRcClientDeathHandler.mCb.unlinkToDeath(mRcClientDeathHandler, 0);
+ mRcClientDeathHandler = null;
} catch (java.util.NoSuchElementException e) {
// not much we can do here
Log.e(TAG, "Encountered " + e + " in unlinkToRcClientDeath()");
@@ -3046,6 +3074,12 @@ public class AudioService extends IAudioService.Stub {
}
}
}
+
+ @Override
+ protected void finalize() throws Throwable {
+ unlinkToRcClientDeath();// unlink exception handled inside method
+ super.finalize();
+ }
}
/**
@@ -3092,6 +3126,7 @@ public class AudioService extends IAudioService.Stub {
if (packageName.equalsIgnoreCase(rcse.mReceiverComponent.getPackageName())) {
// a stack entry is from the package being removed, remove it from the stack
stackIterator.remove();
+ rcse.unlinkToRcClientDeath();
}
}
if (mRCStack.empty()) {
@@ -3172,6 +3207,7 @@ public class AudioService extends IAudioService.Stub {
RemoteControlStackEntry rcse = (RemoteControlStackEntry)stackIterator.next();
if(rcse.mMediaIntent.equals(pi)) {
stackIterator.remove();
+ rcse.unlinkToRcClientDeath();
break;
}
}
@@ -3433,7 +3469,7 @@ public class AudioService extends IAudioService.Stub {
rcse.mCallingPackageName = callingPackageName;
rcse.mCallingUid = Binder.getCallingUid();
if (rcClient == null) {
- rcse.mRcClientDeathHandler = null;
+ // here rcse.mRcClientDeathHandler is null;
break;
}
@@ -3489,7 +3525,6 @@ public class AudioService extends IAudioService.Stub {
rcse.unlinkToRcClientDeath();
// reset the client-related fields
rcse.mRcClient = null;
- rcse.mRcClientDeathHandler = null;
rcse.mCallingPackageName = null;
}
}
diff --git a/media/java/android/media/CamcorderProfile.java b/media/java/android/media/CamcorderProfile.java
index 51a45cd..7d60c55 100644
--- a/media/java/android/media/CamcorderProfile.java
+++ b/media/java/android/media/CamcorderProfile.java
@@ -82,7 +82,6 @@ public class CamcorderProfile
/**
* Quality level corresponding to the QVGA (320x240) resolution.
- * {@hide}
*/
public static final int QUALITY_QVGA = 7;
@@ -127,7 +126,6 @@ public class CamcorderProfile
/**
* Time lapse quality level corresponding to the QVGA (320 x 240) resolution.
- * {@hide}
*/
public static final int QUALITY_TIME_LAPSE_QVGA = 1007;
diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java
index 8793841..e275aa6 100644
--- a/media/java/android/media/MediaFile.java
+++ b/media/java/android/media/MediaFile.java
@@ -71,6 +71,11 @@ public class MediaFile {
private static final int FIRST_VIDEO_FILE_TYPE = FILE_TYPE_MP4;
private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_WEBM;
+ // More video file types
+ public static final int FILE_TYPE_MP2PS = 200;
+ private static final int FIRST_VIDEO_FILE_TYPE2 = FILE_TYPE_MP2PS;
+ private static final int LAST_VIDEO_FILE_TYPE2 = FILE_TYPE_MP2PS;
+
// Image file types
public static final int FILE_TYPE_JPEG = 31;
public static final int FILE_TYPE_GIF = 32;
@@ -235,6 +240,8 @@ public class MediaFile {
addFileType("PPT", FILE_TYPE_MS_POWERPOINT, "application/mspowerpoint", MtpConstants.FORMAT_MS_POWERPOINT_PRESENTATION);
addFileType("FLAC", FILE_TYPE_FLAC, "audio/flac", MtpConstants.FORMAT_FLAC);
addFileType("ZIP", FILE_TYPE_ZIP, "application/zip");
+ addFileType("MPG", FILE_TYPE_MP2PS, "video/mp2p");
+ addFileType("MPEG", FILE_TYPE_MP2PS, "video/mp2p");
}
public static boolean isAudioFileType(int fileType) {
@@ -246,7 +253,9 @@ public class MediaFile {
public static boolean isVideoFileType(int fileType) {
return (fileType >= FIRST_VIDEO_FILE_TYPE &&
- fileType <= LAST_VIDEO_FILE_TYPE);
+ fileType <= LAST_VIDEO_FILE_TYPE)
+ || (fileType >= FIRST_VIDEO_FILE_TYPE2 &&
+ fileType <= LAST_VIDEO_FILE_TYPE2);
}
public static boolean isImageFileType(int fileType) {
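Illustrative note, not part of the patch: with the two new addFileType() entries and the second video range above, a file scanned with an "MPG"/"MPEG" extension now maps to FILE_TYPE_MP2PS ("video/mp2p") and is classified as video. A minimal Java check using only names introduced in this diff:

    // hypothetical check; FILE_TYPE_MP2PS and isVideoFileType() come from the hunk above
    boolean isVideo = MediaFile.isVideoFileType(MediaFile.FILE_TYPE_MP2PS); // true via the second range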
diff --git a/media/java/android/media/MediaInserter.java b/media/java/android/media/MediaInserter.java
new file mode 100644
index 0000000..a998407
--- /dev/null
+++ b/media/java/android/media/MediaInserter.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ContentValues;
+import android.content.IContentProvider;
+import android.net.Uri;
+import android.os.RemoteException;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * A MediaScanner helper class which enables us to do lazy insertion on the
+ * given provider. This class manages buffers internally and flushes when they
+ * are full. Note that you should call flushAll() after using this class.
+ * {@hide}
+ */
+public class MediaInserter {
+ private HashMap<Uri, List<ContentValues>> mRowMap =
+ new HashMap<Uri, List<ContentValues>>();
+
+ private IContentProvider mProvider;
+ private int mBufferSizePerUri;
+
+ public MediaInserter(IContentProvider provider, int bufferSizePerUri) {
+ mProvider = provider;
+ mBufferSizePerUri = bufferSizePerUri;
+ }
+
+ public void insert(Uri tableUri, ContentValues values) throws RemoteException {
+ List<ContentValues> list = mRowMap.get(tableUri);
+ if (list == null) {
+ list = new ArrayList<ContentValues>();
+ mRowMap.put(tableUri, list);
+ }
+ list.add(new ContentValues(values));
+ if (list.size() >= mBufferSizePerUri) {
+ flush(tableUri);
+ }
+ }
+
+ public void flushAll() throws RemoteException {
+ for (Uri tableUri : mRowMap.keySet()){
+ flush(tableUri);
+ }
+ mRowMap.clear();
+ }
+
+ private void flush(Uri tableUri) throws RemoteException {
+ List<ContentValues> list = mRowMap.get(tableUri);
+ if (!list.isEmpty()) {
+ ContentValues[] valuesArray = new ContentValues[list.size()];
+ valuesArray = list.toArray(valuesArray);
+ mProvider.bulkInsert(tableUri, valuesArray);
+ list.clear();
+ }
+ }
+}
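A minimal usage sketch for the new class (illustrative only; everything except MediaInserter's own API is hypothetical). MediaScanner wires this up the same way further down in this change:

    // buffer rows per table URI, then flush whatever remains at the end of a scan
    void scanBatch(IContentProvider provider, Uri audioUri, ContentValues audioValues,
            Uri videoUri, ContentValues videoValues) throws RemoteException {
        MediaInserter inserter = new MediaInserter(provider, 500);
        inserter.insert(audioUri, audioValues); // buffered; bulkInsert fires once 500 rows pile up for this URI
        inserter.insert(videoUri, videoValues); // each table URI keeps its own buffer
        inserter.flushAll();                    // push any partially filled buffers
    }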
diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java
index 10694c3..11ecd1f 100644
--- a/media/java/android/media/MediaMetadataRetriever.java
+++ b/media/java/android/media/MediaMetadataRetriever.java
@@ -458,5 +458,12 @@ public class MediaMetadataRetriever
* @hide
*/
public static final int METADATA_KEY_IS_DRM = 22;
+ /**
+ * This key retrieves the location information, if available.
+ * The location should be specified according to ISO-6709 standard, under
+ * a mp4/3gp box "@xyz". Location with longitude of -90 degrees and latitude
+ * of 180 degrees will be retrieved as "-90.0000+180.0000", for instance.
+ */
+ public static final int METADATA_KEY_LOCATION = 23;
// Add more here...
}
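A hedged sketch of reading the new key (the file path is made up; extractMetadata() returns null when the source carries no "@xyz" box):

    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    retriever.setDataSource("/sdcard/DCIM/clip.3gp"); // hypothetical path
    String location = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_LOCATION);
    // e.g. "-90.0000+180.0000" in ISO-6709 form, or null if the box is absent
    retriever.release();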
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index eb32563..8d71dcf 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -1149,14 +1149,20 @@ public class MediaPlayer
/**
* Releases resources associated with this MediaPlayer object.
* It is considered good practice to call this method when you're
- * done using the MediaPlayer. For instance, whenever the Activity
- * of an application is paused, this method should be invoked to
- * release the MediaPlayer object. In addition to unnecessary resources
- * (such as memory and instances of codecs) being hold, failure to
- * call this method immediately if a MediaPlayer object is no longer
- * needed may also lead to continuous battery consumption for mobile
- * devices, and playback failure if no multiple instances of the
- * same codec is supported on a device.
+ * done using the MediaPlayer. In particular, whenever an Activity
+ * of an application is paused (its onPause() method is called),
+ * or stopped (its onStop() method is called), this method should be
+ * invoked to release the MediaPlayer object, unless the application
+ * has a special need to keep the object around. In addition to
+ * unnecessary resources (such as memory and instances of codecs)
+ * being held, failure to call this method immediately if a
+ * MediaPlayer object is no longer needed may also lead to
+ * continuous battery consumption for mobile devices, and playback
+ * failure for other applications if no multiple instances of the
+ * same codec are supported on a device. Even if multiple instances
+ * of the same codec are supported, some performance degradation
+ * may be expected when unnecessary multiple instances are used
+ * at the same time.
*/
public void release() {
stayAwake(false);
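A short sketch of the pattern the updated javadoc recommends (assumes an Activity holding a single MediaPlayer field named mPlayer, which is hypothetical):

    @Override
    protected void onPause() {
        super.onPause();
        if (mPlayer != null) {
            mPlayer.release(); // frees codec instances, memory and wake locks promptly
            mPlayer = null;
        }
    }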
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index 7b42ac3..08e6032 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -305,7 +305,7 @@ public class MediaRecorder
setVideoEncodingBitRate(profile.videoBitRate);
setVideoEncoder(profile.videoCodec);
if (profile.quality >= CamcorderProfile.QUALITY_TIME_LAPSE_LOW &&
- profile.quality <= CamcorderProfile.QUALITY_TIME_LAPSE_1080P) {
+ profile.quality <= CamcorderProfile.QUALITY_TIME_LAPSE_QVGA) {
// Enable time lapse. Also don't set audio for time lapse.
setParameter(String.format("time-lapse-enable=1"));
} else {
@@ -926,7 +926,20 @@ public class MediaRecorder
/**
* Releases resources associated with this MediaRecorder object.
* It is good practice to call this method when you're done
- * using the MediaRecorder.
+ * using the MediaRecorder. In particular, whenever an Activity
+ * of an application is paused (its onPause() method is called),
+ * or stopped (its onStop() method is called), this method should be
+ * invoked to release the MediaRecorder object, unless the application
+ * has a special need to keep the object around. In addition to
+ * unnecessary resources (such as memory and instances of codecs)
+ * being held, failure to call this method immediately if a
+ * MediaRecorder object is no longer needed may also lead to
+ * continuous battery consumption for mobile devices, and recording
+ * failure for other applications if no multiple instances of the
+ * same codec are supported on a device. Even if multiple instances
+ * of the same codec are supported, some performance degradation
+ * may be expected when unnecessary multiple instances are used
+ * at the same time.
*/
public native void release();
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index 2d927ad..386986e 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -377,43 +377,7 @@ public class MediaScanner
}
}
- private class FileInserter {
-
- private final Uri mUri;
- private final ContentValues[] mValues;
- private int mIndex;
-
- public FileInserter(Uri uri, int count) {
- mUri = uri;
- mValues = new ContentValues[count];
- }
-
- public Uri insert(ContentValues values) {
- if (mIndex == mValues.length) {
- flush();
- }
- mValues[mIndex++] = values;
- // URI not needed when doing bulk inserts
- return null;
- }
-
- public void flush() {
- while (mIndex < mValues.length) {
- mValues[mIndex++] = null;
- }
- try {
- mMediaProvider.bulkInsert(mUri, mValues);
- } catch (RemoteException e) {
- Log.e(TAG, "RemoteException in FileInserter.flush()", e);
- }
- mIndex = 0;
- }
- }
-
- private FileInserter mAudioInserter;
- private FileInserter mVideoInserter;
- private FileInserter mImageInserter;
- private FileInserter mFileInserter;
+ private MediaInserter mMediaInserter;
// hashes file path to FileCacheEntry.
// path should be lower case if mCaseInsensitivePaths is true
@@ -880,17 +844,14 @@ public class MediaScanner
}
Uri tableUri = mFilesUri;
- FileInserter inserter = mFileInserter;
+ MediaInserter inserter = mMediaInserter;
if (!mNoMedia) {
if (MediaFile.isVideoFileType(mFileType)) {
tableUri = mVideoUri;
- inserter = mVideoInserter;
} else if (MediaFile.isImageFileType(mFileType)) {
tableUri = mImagesUri;
- inserter = mImageInserter;
} else if (MediaFile.isAudioFileType(mFileType)) {
tableUri = mAudioUri;
- inserter = mAudioInserter;
}
}
Uri result = null;
@@ -913,7 +874,7 @@ public class MediaScanner
if (inserter == null || entry.mFormat == MtpConstants.FORMAT_ASSOCIATION) {
result = mMediaProvider.insert(tableUri, values);
} else {
- result = inserter.insert(values);
+ inserter.insert(tableUri, values);
}
if (result != null) {
@@ -1212,11 +1173,8 @@ public class MediaScanner
long prescan = System.currentTimeMillis();
if (ENABLE_BULK_INSERTS) {
- // create FileInserters for bulk inserts
- mAudioInserter = new FileInserter(mAudioUri, 500);
- mVideoInserter = new FileInserter(mVideoUri, 500);
- mImageInserter = new FileInserter(mImagesUri, 500);
- mFileInserter = new FileInserter(mFilesUri, 500);
+ // create MediaInserter for bulk inserts
+ mMediaInserter = new MediaInserter(mMediaProvider, 500);
}
for (int i = 0; i < directories.length; i++) {
@@ -1225,14 +1183,8 @@ public class MediaScanner
if (ENABLE_BULK_INSERTS) {
// flush remaining inserts
- mAudioInserter.flush();
- mVideoInserter.flush();
- mImageInserter.flush();
- mFileInserter.flush();
- mAudioInserter = null;
- mVideoInserter = null;
- mImageInserter = null;
- mFileInserter = null;
+ mMediaInserter.flushAll();
+ mMediaInserter = null;
}
long scan = System.currentTimeMillis();
diff --git a/media/java/android/media/videoeditor/MediaImageItem.java b/media/java/android/media/videoeditor/MediaImageItem.java
index a862d00..590b4ae 100755
--- a/media/java/android/media/videoeditor/MediaImageItem.java
+++ b/media/java/android/media/videoeditor/MediaImageItem.java
@@ -154,7 +154,7 @@ public class MediaImageItem extends MediaItem {
final Bitmap imageBitmap;
- if (mHeight > maxResolution.second) {
+ if (mWidth > maxResolution.first || mHeight > maxResolution.second) {
/**
* We need to scale the image
*/
@@ -971,14 +971,13 @@ public class MediaImageItem extends MediaItem {
/**
* Create the bitmap from file
*/
- if (nativeWidth / bitmapWidth > 1) {
-
- final BitmapFactory.Options options = new BitmapFactory.Options();
- options.inSampleSize = nativeWidth / (int)bitmapWidth;
- srcBitmap = BitmapFactory.decodeFile(filename, options);
- } else {
- srcBitmap = BitmapFactory.decodeFile(filename);
- }
+ int sampleSize = (int) Math.ceil(Math.max(
+ (float) nativeWidth / bitmapWidth,
+ (float) nativeHeight / bitmapHeight));
+ sampleSize = nextPowerOf2(sampleSize);
+ final BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inSampleSize = sampleSize;
+ srcBitmap = BitmapFactory.decodeFile(filename, options);
} else {
bitmapWidth = width;
bitmapHeight = height;
@@ -1009,4 +1008,14 @@ public class MediaImageItem extends MediaItem {
srcBitmap.recycle();
return bitmap;
}
+
+ public static int nextPowerOf2(int n) {
+ n -= 1;
+ n |= n >>> 16;
+ n |= n >>> 8;
+ n |= n >>> 4;
+ n |= n >>> 2;
+ n |= n >>> 1;
+ return n + 1;
+ }
}
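A worked example of the new sample-size math (numbers are illustrative):

    // nativeWidth = 3000, bitmapWidth = 640, nativeHeight = 2000, bitmapHeight = 480
    int sampleSize = (int) Math.ceil(Math.max(3000f / 640, 2000f / 480)); // ceil(max(4.69, 4.17)) = 5
    sampleSize = MediaImageItem.nextPowerOf2(sampleSize);                 // rounds up to 8
    // BitmapFactory.Options.inSampleSize = 8 decodes at roughly 1/8 of the native resolution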
diff --git a/media/jni/android_media_MediaScanner.cpp b/media/jni/android_media_MediaScanner.cpp
index b88296f..09152f5 100644
--- a/media/jni/android_media_MediaScanner.cpp
+++ b/media/jni/android_media_MediaScanner.cpp
@@ -56,6 +56,53 @@ static status_t checkAndClearExceptionFromCallback(JNIEnv* env, const char* meth
return OK;
}
+// stolen from dalvik/vm/checkJni.cpp
+static bool isValidUtf8(const char* bytes) {
+ while (*bytes != '\0') {
+ unsigned char utf8 = *(bytes++);
+ // Switch on the high four bits.
+ switch (utf8 >> 4) {
+ case 0x00:
+ case 0x01:
+ case 0x02:
+ case 0x03:
+ case 0x04:
+ case 0x05:
+ case 0x06:
+ case 0x07:
+ // Bit pattern 0xxx. No need for any extra bytes.
+ break;
+ case 0x08:
+ case 0x09:
+ case 0x0a:
+ case 0x0b:
+ case 0x0f:
+ /*
+ * Bit pattern 10xx or 1111, which are illegal start bytes.
+ * Note: 1111 is valid for normal UTF-8, but not the
+ * modified UTF-8 used here.
+ */
+ return false;
+ case 0x0e:
+ // Bit pattern 1110, so there are two additional bytes.
+ utf8 = *(bytes++);
+ if ((utf8 & 0xc0) != 0x80) {
+ return false;
+ }
+ // Fall through to take care of the final byte.
+ case 0x0c:
+ case 0x0d:
+ // Bit pattern 110x, so there is one additional byte.
+ utf8 = *(bytes++);
+ if ((utf8 & 0xc0) != 0x80) {
+ return false;
+ }
+ break;
+ }
+ }
+ return true;
+}
+
class MyMediaScannerClient : public MediaScannerClient
{
public:
@@ -123,7 +170,22 @@ public:
mEnv->ExceptionClear();
return NO_MEMORY;
}
- if ((valueStr = mEnv->NewStringUTF(value)) == NULL) {
+ char *cleaned = NULL;
+ if (!isValidUtf8(value)) {
+ cleaned = strdup(value);
+ char *chp = cleaned;
+ char ch;
+ while ((ch = *chp)) {
+ if (ch & 0x80) {
+ *chp = '?';
+ }
+ chp++;
+ }
+ value = cleaned;
+ }
+ valueStr = mEnv->NewStringUTF(value);
+ free(cleaned);
+ if (valueStr == NULL) {
mEnv->DeleteLocalRef(nameStr);
mEnv->ExceptionClear();
return NO_MEMORY;
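The same sanitization idea rendered in Java for clarity (illustrative only; the actual fix is the C++ above): when a metadata string fails the modified-UTF-8 check, every byte with the high bit set is replaced with '?' so NewStringUTF() cannot abort on a malformed sequence.

    // hypothetical helper, not part of the patch
    static String sanitizeForJni(byte[] value) {
        byte[] cleaned = value.clone();
        for (int i = 0; i < cleaned.length; i++) {
            if ((cleaned[i] & 0x80) != 0) {
                cleaned[i] = '?'; // strip every non-ASCII byte, as the native code does
            }
        }
        return new String(cleaned, java.nio.charset.Charset.forName("US-ASCII"));
    }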
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index efa1c45..5a1e93a 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -49,6 +49,16 @@ extern "C" const struct effect_interface_s gLvmEffectInterface;
}\
}
+
+static inline int16_t clamp16(int32_t sample)
+{
+ // check overflow for both positive and negative values:
+ // all bits above short range must me equal to sign bit
+ if ((sample>>15) ^ (sample>>31))
+ sample = 0x7FFF ^ (sample>>31);
+ return sample;
+}
+
// Namespaces
namespace android {
namespace {
@@ -707,13 +717,6 @@ int LvmBundle_init(EffectContext *pContext){
} /* end LvmBundle_init */
-static inline int16_t clamp16(int32_t sample)
-{
- if ((sample>>15) ^ (sample>>31))
- sample = 0x7FFF ^ (sample>>31);
- return sample;
-}
-
//----------------------------------------------------------------------------
// LvmBundle_process()
//----------------------------------------------------------------------------
@@ -2459,6 +2462,9 @@ int Effect_setEnabled(EffectContext *pContext, bool enabled)
LOGV("\tEffect_setEnabled() type %d, enabled %d", pContext->EffectType, enabled);
if (enabled) {
+ // Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due
+ // to their nature.
+ bool tempDisabled = false;
switch (pContext->EffectType) {
case LVM_BASS_BOOST:
if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
@@ -2471,6 +2477,7 @@ int Effect_setEnabled(EffectContext *pContext, bool enabled)
pContext->pBundledContext->SamplesToExitCountBb =
(LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
pContext->pBundledContext->bBassEnabled = LVM_TRUE;
+ tempDisabled = pContext->pBundledContext->bBassTempDisabled;
break;
case LVM_EQUALIZER:
if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {
@@ -2495,6 +2502,7 @@ int Effect_setEnabled(EffectContext *pContext, bool enabled)
pContext->pBundledContext->SamplesToExitCountVirt =
(LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE;
+ tempDisabled = pContext->pBundledContext->bVirtualizerTempDisabled;
break;
case LVM_VOLUME:
if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE) {
@@ -2508,7 +2516,9 @@ int Effect_setEnabled(EffectContext *pContext, bool enabled)
LOGV("\tEffect_setEnabled() invalid effect type");
return -EINVAL;
}
- LvmEffect_enable(pContext);
+ if (!tempDisabled) {
+ LvmEffect_enable(pContext);
+ }
} else {
switch (pContext->EffectType) {
case LVM_BASS_BOOST:
@@ -2683,12 +2693,19 @@ int Effect_process(effect_handle_t self,
LOGV("\tLVM_ERROR : LvmBundle_process returned error %d", lvmStatus);
return lvmStatus;
}
- }else{
+ } else {
//LOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect %d",
//pContext->pBundledContext->NumberEffectsEnabled,
//pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
// 2 is for stereo input
- memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount*sizeof(LVM_INT16)*2);
+ if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ for (size_t i=0; i < outBuffer->frameCount*2; i++){
+ outBuffer->s16[i] =
+ clamp16((LVM_INT32)outBuffer->s16[i] + (LVM_INT32)inBuffer->s16[i]);
+ }
+ } else {
+ memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount*sizeof(LVM_INT16)*2);
+ }
}
return status;
@@ -3047,9 +3064,10 @@ int Effect_command(effect_handle_t self,
LOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start");
uint32_t device = *(uint32_t *)pCmdData;
- if(pContext->EffectType == LVM_BASS_BOOST){
- if((device == AUDIO_DEVICE_OUT_SPEAKER)||(device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+ if (pContext->EffectType == LVM_BASS_BOOST) {
+ if((device == AUDIO_DEVICE_OUT_SPEAKER) ||
+ (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
+ (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
LOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
*(int32_t *)pCmdData);
LOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST");
@@ -3058,30 +3076,31 @@ int Effect_command(effect_handle_t self,
// the effect must still report its original state as this can only be changed
// by the ENABLE/DISABLE command
- if(pContext->pBundledContext->bBassEnabled == LVM_TRUE){
+ if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
LOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_BASS_BOOST %d",
*(int32_t *)pCmdData);
android::LvmEffect_disable(pContext);
- pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
}
- }else{
+ pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
+ } else {
LOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_BASS_BOOST %d",
*(int32_t *)pCmdData);
// If a device supports bassboost and the effect has been temporarily disabled
// previously then re-enable it
- if(pContext->pBundledContext->bBassTempDisabled == LVM_TRUE){
+ if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
LOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_BASS_BOOST %d",
*(int32_t *)pCmdData);
android::LvmEffect_enable(pContext);
- pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
}
+ pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
}
}
- if(pContext->EffectType == LVM_VIRTUALIZER){
- if((device == AUDIO_DEVICE_OUT_SPEAKER)||(device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
+ if((device == AUDIO_DEVICE_OUT_SPEAKER)||
+ (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
+ (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
LOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
*(int32_t *)pCmdData);
LOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
@@ -3090,25 +3109,25 @@ int Effect_command(effect_handle_t self,
// the effect must still report its original state as this can only be changed
// by the ENABLE/DISABLE command
- if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
+ if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
LOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
*(int32_t *)pCmdData);
android::LvmEffect_disable(pContext);
- pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
}
- }else{
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
+ } else {
LOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
*(int32_t *)pCmdData);
// If a device supports virtualizer and the effect has been temporarily disabled
// previously then re-enable it
- if(pContext->pBundledContext->bVirtualizerTempDisabled == LVM_TRUE){
+ if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
LOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
*(int32_t *)pCmdData);
android::LvmEffect_enable(pContext);
- pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
}
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
}
}
LOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end");
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index 3c3af8f..1a06cc6 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -47,17 +47,22 @@ enum visualizer_state_e {
VISUALIZER_STATE_ACTIVE,
};
+// maximum number of reads from same buffer before resetting capture buffer. This means
+// that the framework has stopped playing audio and we must start returning silence
+#define MAX_STALL_COUNT 10
+
struct VisualizerContext {
const struct effect_interface_s *mItfe;
effect_config_t mConfig;
- uint32_t mState;
uint32_t mCaptureIdx;
uint32_t mCaptureSize;
- uint32_t mCurrentBuf;
+ uint8_t mState;
+ uint8_t mCurrentBuf;
+ uint8_t mLastBuf;
+ uint8_t mStallCount;
uint8_t mCaptureBuf[2][VISUALIZER_CAPTURE_SIZE_MAX];
};
-
//
//--- Local functions
//
@@ -66,6 +71,8 @@ void Visualizer_reset(VisualizerContext *pContext)
{
pContext->mCaptureIdx = 0;
pContext->mCurrentBuf = 0;
+ pContext->mLastBuf = 1;
+ pContext->mStallCount = 0;
memset(pContext->mCaptureBuf[0], 0x80, VISUALIZER_CAPTURE_SIZE_MAX);
memset(pContext->mCaptureBuf[1], 0x80, VISUALIZER_CAPTURE_SIZE_MAX);
}
@@ -417,9 +424,24 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
memcpy(pReplyData,
pContext->mCaptureBuf[pContext->mCurrentBuf ^ 1],
pContext->mCaptureSize);
+ // if audio framework has stopped playing audio although the effect is still
+ // active we must clear the capture buffer to return silence
+ if (pContext->mLastBuf == pContext->mCurrentBuf) {
+ if (pContext->mStallCount < MAX_STALL_COUNT) {
+ if (++pContext->mStallCount == MAX_STALL_COUNT) {
+ memset(pContext->mCaptureBuf[pContext->mCurrentBuf ^ 1],
+ 0x80,
+ pContext->mCaptureSize);
+ }
+ }
+ } else {
+ pContext->mStallCount = 0;
+ }
+ pContext->mLastBuf = pContext->mCurrentBuf;
} else {
memset(pReplyData, 0x80, pContext->mCaptureSize);
}
+
break;
default:
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 50a41ca..9c1e6b7 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -35,7 +35,6 @@ enum {
SET_DATA_SOURCE_URL,
SET_DATA_SOURCE_FD,
SET_DATA_SOURCE_STREAM,
- SET_VIDEO_SURFACE,
PREPARE_ASYNC,
START,
STOP,
@@ -112,16 +111,6 @@ public:
return reply.readInt32();
}
- // pass the buffered Surface to the media player service
- status_t setVideoSurface(const sp<Surface>& surface)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
- Surface::writeToParcel(surface, &data);
- remote()->transact(SET_VIDEO_SURFACE, data, &reply);
- return reply.readInt32();
- }
-
// pass the buffered ISurfaceTexture to the media player service
status_t setVideoSurfaceTexture(const sp<ISurfaceTexture>& surfaceTexture)
{
@@ -345,12 +334,6 @@ status_t BnMediaPlayer::onTransact(
reply->writeInt32(setDataSource(source));
return NO_ERROR;
}
- case SET_VIDEO_SURFACE: {
- CHECK_INTERFACE(IMediaPlayer, data, reply);
- sp<Surface> surface = Surface::readFromParcel(data);
- reply->writeInt32(setVideoSurface(surface));
- return NO_ERROR;
- } break;
case SET_VIDEO_SURFACETEXTURE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
sp<ISurfaceTexture> surfaceTexture =
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index ad55ff8..6096b72 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -404,11 +404,12 @@ MediaProfiles::createVideoEditorCap(const char **atts, MediaProfiles *profiles)
CHECK(!strcmp("maxInputFrameWidth", atts[0]) &&
!strcmp("maxInputFrameHeight", atts[2]) &&
!strcmp("maxOutputFrameWidth", atts[4]) &&
- !strcmp("maxOutputFrameHeight", atts[6]));
+ !strcmp("maxOutputFrameHeight", atts[6]) &&
+ !strcmp("maxPrefetchYUVFrames", atts[8]));
MediaProfiles::VideoEditorCap *pVideoEditorCap =
new MediaProfiles::VideoEditorCap(atoi(atts[1]), atoi(atts[3]),
- atoi(atts[5]), atoi(atts[7]));
+ atoi(atts[5]), atoi(atts[7]), atoi(atts[9]));
logVideoEditorCap(*pVideoEditorCap);
profiles->mVideoEditorCap = pVideoEditorCap;
@@ -850,7 +851,8 @@ MediaProfiles::createDefaultVideoEditorCap(MediaProfiles *profiles)
VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH,
VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT,
VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH,
- VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT);
+ VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT,
+ VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES);
}
/*static*/ void
MediaProfiles::createDefaultExportVideoProfiles(MediaProfiles *profiles)
@@ -1019,6 +1021,8 @@ int MediaProfiles::getVideoEditorCapParamByName(const char *name) const
return mVideoEditorCap->mMaxOutputFrameWidth;
if (!strcmp("videoeditor.output.height.max", name))
return mVideoEditorCap->mMaxOutputFrameHeight;
+ if (!strcmp("maxPrefetchYUVFrames", name))
+ return mVideoEditorCap->mMaxPrefetchYUVFrames;
LOGE("The given video editor param name %s is not found", name);
return -1;
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index f72300b..c2e1ddf 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -219,14 +219,6 @@ status_t MediaPlayer::getMetadata(bool update_only, bool apply_filter, Parcel *m
return mPlayer->getMetadata(update_only, apply_filter, metadata);
}
-status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
-{
- LOGV("setVideoSurface");
- Mutex::Autolock _l(mLock);
- if (mPlayer == 0) return NO_INIT;
- return mPlayer->setVideoSurface(surface);
-}
-
status_t MediaPlayer::setVideoSurfaceTexture(
const sp<ISurfaceTexture>& surfaceTexture)
{
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index ec7d8a0..a3e2517 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -32,8 +32,8 @@ LOCAL_SHARED_LIBRARIES := \
libdl
LOCAL_STATIC_LIBRARIES := \
- libstagefright_rtsp \
libstagefright_nuplayer \
+ libstagefright_rtsp \
LOCAL_C_INCLUDES := \
$(JNI_H_INCLUDE) \
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index ba9f54f..e8d0f0c 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -589,6 +589,10 @@ player_type getPlayerType(const char* url)
}
}
+ if (!strncasecmp("rtsp://", url, 7)) {
+ return NU_PLAYER;
+ }
+
// use MidiFile for MIDI extensions
int lenURL = strlen(url);
for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
@@ -784,14 +788,6 @@ status_t MediaPlayerService::Client::setDataSource(
return mStatus;
}
-status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
-{
- LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
- sp<MediaPlayerBase> p = getPlayer();
- if (p == 0) return UNKNOWN_ERROR;
- return p->setVideoSurface(surface);
-}
-
void MediaPlayerService::Client::disconnectNativeWindow() {
if (mConnectedWindow != NULL) {
status_t err = native_window_api_disconnect(mConnectedWindow.get(),
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 62214ba..04d9e28 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -248,7 +248,6 @@ private:
// IMediaPlayer interface
virtual void disconnect();
- virtual status_t setVideoSurface(const sp<Surface>& surface);
virtual status_t setVideoSurfaceTexture(
const sp<ISurfaceTexture>& surfaceTexture);
virtual status_t prepareAsync();
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index b35696f..3469389 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,6 @@ public:
const char* path, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
virtual status_t setVideoSurfaceTexture(
const sp<ISurfaceTexture>& surfaceTexture)
{ return UNKNOWN_ERROR; }
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index cd4b1ef..598d573 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -69,12 +69,6 @@ status_t StagefrightPlayer::setDataSource(const sp<IStreamSource> &source) {
return mPlayer->setDataSource(source);
}
-status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
- LOGV("setVideoSurface");
-
- return mPlayer->setSurface(surface);
-}
-
status_t StagefrightPlayer::setVideoSurfaceTexture(
const sp<ISurfaceTexture> &surfaceTexture) {
LOGV("setVideoSurfaceTexture");
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index cbc6d49..e89e18a 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -40,7 +40,6 @@ public:
virtual status_t setDataSource(const sp<IStreamSource> &source);
- virtual status_t setVideoSurface(const sp<Surface> &surface);
virtual status_t setVideoSurfaceTexture(
const sp<ISurfaceTexture> &surfaceTexture);
virtual status_t prepare();
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 802a11b..91ffa7d 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,9 +75,6 @@ class TestPlayerStub : public MediaPlayerInterface {
// All the methods below wrap the mPlayer instance.
- virtual status_t setVideoSurface(const android::sp<android::Surface>& s) {
- return mPlayer->setVideoSurface(s);
- }
virtual status_t setVideoSurfaceTexture(
const android::sp<android::ISurfaceTexture>& st) {
return mPlayer->setVideoSurfaceTexture(st);
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index e761509..33e2f93 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -8,6 +8,7 @@ LOCAL_SRC_FILES:= \
NuPlayerDriver.cpp \
NuPlayerRenderer.cpp \
NuPlayerStreamListener.cpp \
+ RTSPSource.cpp \
StreamingSource.cpp \
LOCAL_C_INCLUDES := \
@@ -15,6 +16,7 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/base/media/libstagefright/include \
$(TOP)/frameworks/base/media/libstagefright/mpeg2ts \
$(TOP)/frameworks/base/media/libstagefright/httplive \
+ $(TOP)/frameworks/base/media/libstagefright/rtsp \
LOCAL_MODULE:= libstagefright_nuplayer
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 6c54130..2a5c0a6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -25,6 +25,7 @@
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
+#include "RTSPSource.h"
#include "StreamingSource.h"
#include "ATSParser.h"
@@ -87,13 +88,14 @@ void NuPlayer::setDataSource(
const char *url, const KeyedVector<String8, String8> *headers) {
sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
- msg->setObject("source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
- msg->post();
-}
+ if (!strncasecmp(url, "rtsp://", 7)) {
+ msg->setObject(
+ "source", new RTSPSource(url, headers, mUIDValid, mUID));
+ } else {
+ msg->setObject(
+ "source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
+ }
-void NuPlayer::setVideoSurface(const sp<Surface> &surface) {
- sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
- msg->setObject("native-window", new NativeWindowWrapper(surface));
msg->post();
}
@@ -568,8 +570,15 @@ void NuPlayer::finishReset() {
CHECK(mAudioDecoder == NULL);
CHECK(mVideoDecoder == NULL);
+ ++mScanSourcesGeneration;
+ mScanSourcesPending = false;
+
mRenderer.clear();
- mSource.clear();
+
+ if (mSource != NULL) {
+ mSource->stop();
+ mSource.clear();
+ }
if (mDriver != NULL) {
sp<NuPlayerDriver> driver = mDriver.promote();
@@ -781,7 +790,7 @@ void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
return;
}
- driver->sendEvent(msg, ext1, ext2);
+ driver->notifyListener(msg, ext1, ext2);
}
void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index a5382b4..f23deea 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -42,7 +42,6 @@ struct NuPlayer : public AHandler {
void setDataSource(
const char *url, const KeyedVector<String8, String8> *headers);
- void setVideoSurface(const sp<Surface> &surface);
void setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink);
void start();
@@ -68,6 +67,7 @@ private:
struct Renderer;
struct Source;
struct StreamingSource;
+ struct RTSPSource;
enum {
kWhatSetDataSource = '=DaS',
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index b1e917d..5aa99bf 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -35,6 +35,7 @@ NuPlayerDriver::NuPlayerDriver()
mNumFramesDropped(0),
mLooper(new ALooper),
mState(UNINITIALIZED),
+ mAtEOS(false),
mStartupSeekTimeUs(-1) {
mLooper->setName("NuPlayerDriver Looper");
@@ -88,12 +89,6 @@ status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
return OK;
}
-status_t NuPlayerDriver::setVideoSurface(const sp<Surface> &surface) {
- mPlayer->setVideoSurface(surface);
-
- return OK;
-}
-
status_t NuPlayerDriver::setVideoSurfaceTexture(
const sp<ISurfaceTexture> &surfaceTexture) {
mPlayer->setVideoSurfaceTexture(surfaceTexture);
@@ -106,7 +101,7 @@ status_t NuPlayerDriver::prepare() {
}
status_t NuPlayerDriver::prepareAsync() {
- sendEvent(MEDIA_PREPARED);
+ notifyListener(MEDIA_PREPARED);
return OK;
}
@@ -117,6 +112,7 @@ status_t NuPlayerDriver::start() {
return INVALID_OPERATION;
case STOPPED:
{
+ mAtEOS = false;
mPlayer->start();
if (mStartupSeekTimeUs >= 0) {
@@ -173,7 +169,7 @@ status_t NuPlayerDriver::pause() {
}
bool NuPlayerDriver::isPlaying() {
- return mState == PLAYING;
+ return mState == PLAYING && !mAtEOS;
}
status_t NuPlayerDriver::seekTo(int msec) {
@@ -190,6 +186,7 @@ status_t NuPlayerDriver::seekTo(int msec) {
case PLAYING:
case PAUSED:
{
+ mAtEOS = false;
mPlayer->seekToAsync(seekTimeUs);
break;
}
@@ -291,7 +288,7 @@ void NuPlayerDriver::notifyPosition(int64_t positionUs) {
}
void NuPlayerDriver::notifySeekComplete() {
- sendEvent(MEDIA_SEEK_COMPLETE);
+ notifyListener(MEDIA_SEEK_COMPLETE);
}
void NuPlayerDriver::notifyFrameStats(
@@ -320,4 +317,12 @@ status_t NuPlayerDriver::dump(int fd, const Vector<String16> &args) const {
return OK;
}
+void NuPlayerDriver::notifyListener(int msg, int ext1, int ext2) {
+ if (msg == MEDIA_PLAYBACK_COMPLETE || msg == MEDIA_ERROR) {
+ mAtEOS = true;
+ }
+
+ sendEvent(msg, ext1, ext2);
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 181c37d..4a0026c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -37,7 +37,6 @@ struct NuPlayerDriver : public MediaPlayerInterface {
virtual status_t setDataSource(const sp<IStreamSource> &source);
- virtual status_t setVideoSurface(const sp<Surface> &surface);
virtual status_t setVideoSurfaceTexture(
const sp<ISurfaceTexture> &surfaceTexture);
virtual status_t prepare();
@@ -67,6 +66,7 @@ struct NuPlayerDriver : public MediaPlayerInterface {
void notifyPosition(int64_t positionUs);
void notifySeekComplete();
void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped);
+ void notifyListener(int msg, int ext1 = 0, int ext2 = 0);
protected:
virtual ~NuPlayerDriver();
@@ -95,6 +95,7 @@ private:
};
State mState;
+ bool mAtEOS;
int64_t mStartupSeekTimeUs;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 61a7ba4..640e9fa 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -219,7 +219,9 @@ void NuPlayer::Renderer::signalAudioSinkChanged() {
bool NuPlayer::Renderer::onDrainAudioQueue() {
uint32_t numFramesPlayed;
- CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+ if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
+ return false;
+ }
ssize_t numFramesAvailableToWrite =
mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 8a7eece..531b29f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -28,6 +28,7 @@ struct NuPlayer::Source : public RefBase {
Source() {}
virtual void start() = 0;
+ virtual void stop() {}
// Returns OK iff more data was available,
// an error or ERROR_END_OF_STREAM if not.
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
new file mode 100644
index 0000000..e72adc4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTSPSource"
+#include <utils/Log.h>
+
+#include "RTSPSource.h"
+
+#include "AnotherPacketSource.h"
+#include "MyHandler.h"
+
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+NuPlayer::RTSPSource::RTSPSource(
+ const char *url,
+ const KeyedVector<String8, String8> *headers,
+ bool uidValid,
+ uid_t uid)
+ : mURL(url),
+ mUIDValid(uidValid),
+ mUID(uid),
+ mFlags(0),
+ mState(DISCONNECTED),
+ mFinalResult(OK),
+ mDisconnectReplyID(0) {
+ if (headers) {
+ mExtraHeaders = *headers;
+
+ ssize_t index =
+ mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+
+ if (index >= 0) {
+ mFlags |= kFlagIncognito;
+
+ mExtraHeaders.removeItemsAt(index);
+ }
+ }
+}
+
+NuPlayer::RTSPSource::~RTSPSource() {
+ if (mLooper != NULL) {
+ mLooper->stop();
+ }
+}
+
+void NuPlayer::RTSPSource::start() {
+ if (mLooper == NULL) {
+ mLooper = new ALooper;
+ mLooper->setName("rtsp");
+ mLooper->start();
+
+ mReflector = new AHandlerReflector<RTSPSource>(this);
+ mLooper->registerHandler(mReflector);
+ }
+
+ CHECK(mHandler == NULL);
+
+ sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
+
+ mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID);
+ mLooper->registerHandler(mHandler);
+
+ CHECK_EQ(mState, (int)DISCONNECTED);
+ mState = CONNECTING;
+
+ mHandler->connect();
+}
+
+void NuPlayer::RTSPSource::stop() {
+ sp<AMessage> msg = new AMessage(kWhatDisconnect, mReflector->id());
+
+ sp<AMessage> dummy;
+ msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer::RTSPSource::feedMoreTSData() {
+ return mFinalResult;
+}
+
+sp<MetaData> NuPlayer::RTSPSource::getFormat(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
+
+ if (source == NULL) {
+ return NULL;
+ }
+
+ return source->getFormat();
+}
+
+status_t NuPlayer::RTSPSource::dequeueAccessUnit(
+ bool audio, sp<ABuffer> *accessUnit) {
+ sp<AnotherPacketSource> source = getSource(audio);
+
+ if (source == NULL) {
+ return -EWOULDBLOCK;
+ }
+
+ status_t finalResult;
+ if (!source->hasBufferAvailable(&finalResult)) {
+ return finalResult == OK ? -EWOULDBLOCK : finalResult;
+ }
+
+ return source->dequeueAccessUnit(accessUnit);
+}
+
+sp<AnotherPacketSource> NuPlayer::RTSPSource::getSource(bool audio) {
+ return audio ? mAudioTrack : mVideoTrack;
+}
+
+status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {
+ *durationUs = 0ll;
+
+ int64_t audioDurationUs;
+ if (mAudioTrack != NULL
+ && mAudioTrack->getFormat()->findInt64(
+ kKeyDuration, &audioDurationUs)
+ && audioDurationUs > *durationUs) {
+ *durationUs = audioDurationUs;
+ }
+
+ int64_t videoDurationUs;
+ if (mVideoTrack != NULL
+ && mVideoTrack->getFormat()->findInt64(
+ kKeyDuration, &videoDurationUs)
+ && videoDurationUs > *durationUs) {
+ *durationUs = videoDurationUs;
+ }
+
+ return OK;
+}
+
+status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
+ if (mState != CONNECTED) {
+ return UNKNOWN_ERROR;
+ }
+
+ mState = SEEKING;
+ mHandler->seek(seekTimeUs);
+
+ return OK;
+}
+
+bool NuPlayer::RTSPSource::isSeekable() {
+ return true;
+}
+
+void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
+ if (msg->what() == kWhatDisconnect) {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ mDisconnectReplyID = replyID;
+ finishDisconnectIfPossible();
+ return;
+ }
+
+ CHECK_EQ(msg->what(), (int)kWhatNotify);
+
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ switch (what) {
+ case MyHandler::kWhatConnected:
+ onConnected();
+ break;
+
+ case MyHandler::kWhatDisconnected:
+ onDisconnected(msg);
+ break;
+
+ case MyHandler::kWhatSeekDone:
+ {
+ mState = CONNECTED;
+ break;
+ }
+
+ case MyHandler::kWhatAccessUnit:
+ {
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK_LT(trackIndex, mTracks.size());
+
+ sp<RefBase> obj;
+ CHECK(msg->findObject("accessUnit", &obj));
+
+ sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
+
+ int32_t damaged;
+ if (accessUnit->meta()->findInt32("damaged", &damaged)
+ && damaged) {
+ LOGI("dropping damaged access unit.");
+ break;
+ }
+
+ const TrackInfo &info = mTracks.editItemAt(trackIndex);
+ sp<AnotherPacketSource> source = info.mSource;
+ if (source != NULL) {
+#if 1
+ uint32_t rtpTime;
+ CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+ int64_t nptUs =
+ ((double)rtpTime - (double)info.mRTPTime)
+ / info.mTimeScale
+ * 1000000ll
+ + info.mNormalPlaytimeUs;
+
+ accessUnit->meta()->setInt64("timeUs", nptUs);
+#endif
+
+ source->queueAccessUnit(accessUnit);
+ }
+ break;
+ }
+
+ case MyHandler::kWhatEOS:
+ {
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK_LT(trackIndex, mTracks.size());
+
+ int32_t finalResult;
+ CHECK(msg->findInt32("finalResult", &finalResult));
+ CHECK_NE(finalResult, (status_t)OK);
+
+ TrackInfo *info = &mTracks.editItemAt(trackIndex);
+ sp<AnotherPacketSource> source = info->mSource;
+ if (source != NULL) {
+ source->signalEOS(finalResult);
+ }
+
+ break;
+ }
+
+ case MyHandler::kWhatSeekDiscontinuity:
+ {
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK_LT(trackIndex, mTracks.size());
+
+ TrackInfo *info = &mTracks.editItemAt(trackIndex);
+ sp<AnotherPacketSource> source = info->mSource;
+ if (source != NULL) {
+ source->queueDiscontinuity(ATSParser::DISCONTINUITY_SEEK, NULL);
+ }
+
+ break;
+ }
+
+ case MyHandler::kWhatNormalPlayTimeMapping:
+ {
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK_LT(trackIndex, mTracks.size());
+
+ uint32_t rtpTime;
+ CHECK(msg->findInt32("rtpTime", (int32_t *)&rtpTime));
+
+ int64_t nptUs;
+ CHECK(msg->findInt64("nptUs", &nptUs));
+
+ TrackInfo *info = &mTracks.editItemAt(trackIndex);
+ info->mRTPTime = rtpTime;
+ info->mNormalPlaytimeUs = nptUs;
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void NuPlayer::RTSPSource::onConnected() {
+ CHECK(mAudioTrack == NULL);
+ CHECK(mVideoTrack == NULL);
+
+ size_t numTracks = mHandler->countTracks();
+ for (size_t i = 0; i < numTracks; ++i) {
+ int32_t timeScale;
+ sp<MetaData> format = mHandler->getTrackFormat(i, &timeScale);
+
+ const char *mime;
+ CHECK(format->findCString(kKeyMIMEType, &mime));
+
+ bool isAudio = !strncasecmp(mime, "audio/", 6);
+ bool isVideo = !strncasecmp(mime, "video/", 6);
+
+ TrackInfo info;
+ info.mTimeScale = timeScale;
+ info.mRTPTime = 0;
+ info.mNormalPlaytimeUs = 0ll;
+
+ if ((isAudio && mAudioTrack == NULL)
+ || (isVideo && mVideoTrack == NULL)) {
+ sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+ if (isAudio) {
+ mAudioTrack = source;
+ } else {
+ mVideoTrack = source;
+ }
+
+ info.mSource = source;
+ }
+
+ mTracks.push(info);
+ }
+
+ mState = CONNECTED;
+}
+
+void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
+ status_t err;
+ CHECK(msg->findInt32("result", &err));
+ CHECK_NE(err, (status_t)OK);
+
+ mLooper->unregisterHandler(mHandler->id());
+ mHandler.clear();
+
+ mState = DISCONNECTED;
+ mFinalResult = err;
+
+ if (mDisconnectReplyID != 0) {
+ finishDisconnectIfPossible();
+ }
+}
+
+void NuPlayer::RTSPSource::finishDisconnectIfPossible() {
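+ // If the session is still up, ask the handler to disconnect first; the
+ // reply to the pending disconnect request is posted once we reach
+ // DISCONNECTED.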
+ if (mState != DISCONNECTED) {
+ mHandler->disconnect();
+ return;
+ }
+
+ (new AMessage)->postReply(mDisconnectReplyID);
+ mDisconnectReplyID = 0;
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
new file mode 100644
index 0000000..66eab72
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTSP_SOURCE_H_
+
+#define RTSP_SOURCE_H_
+
+#include "NuPlayerSource.h"
+
+#include <media/stagefright/foundation/AHandlerReflector.h>
+
+namespace android {
+
+struct ALooper;
+struct AnotherPacketSource;
+struct MyHandler;
+
+struct NuPlayer::RTSPSource : public NuPlayer::Source {
+ RTSPSource(
+ const char *url,
+ const KeyedVector<String8, String8> *headers,
+ bool uidValid = false,
+ uid_t uid = 0);
+
+ virtual void start();
+ virtual void stop();
+
+ virtual status_t feedMoreTSData();
+
+ virtual sp<MetaData> getFormat(bool audio);
+ virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+ virtual status_t getDuration(int64_t *durationUs);
+ virtual status_t seekTo(int64_t seekTimeUs);
+ virtual bool isSeekable();
+
+ void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+ virtual ~RTSPSource();
+
+private:
+ enum {
+ kWhatNotify = 'noti',
+ kWhatDisconnect = 'disc',
+ };
+
+ enum State {
+ DISCONNECTED,
+ CONNECTING,
+ CONNECTED,
+ SEEKING,
+ };
+
+ enum Flags {
+ // Don't log any URLs.
+ kFlagIncognito = 1,
+ };
+
+ struct TrackInfo {
+ sp<AnotherPacketSource> mSource;
+
+ int32_t mTimeScale;
+ uint32_t mRTPTime;
+ int64_t mNormalPlaytimeUs;
+ };
+
+ AString mURL;
+ KeyedVector<String8, String8> mExtraHeaders;
+ bool mUIDValid;
+ uid_t mUID;
+ uint32_t mFlags;
+ State mState;
+ status_t mFinalResult;
+ uint32_t mDisconnectReplyID;
+
+ sp<ALooper> mLooper;
+ sp<AHandlerReflector<RTSPSource> > mReflector;
+ sp<MyHandler> mHandler;
+
+ Vector<TrackInfo> mTracks;
+ sp<AnotherPacketSource> mAudioTrack;
+ sp<AnotherPacketSource> mVideoTrack;
+
+ sp<AnotherPacketSource> getSource(bool audio);
+
+ void onConnected();
+ void onDisconnected(const sp<AMessage> &msg);
+ void finishDisconnectIfPossible();
+
+ DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
+};
+
+} // namespace android
+
+#endif // RTSP_SOURCE_H_
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9cb18de..d947760 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -681,6 +681,10 @@ void ACodec::setComponentRole(
static const MimeToRole kMimeToRole[] = {
{ MEDIA_MIMETYPE_AUDIO_MPEG,
"audio_decoder.mp3", "audio_encoder.mp3" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+ "audio_decoder.mp1", "audio_encoder.mp1" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+ "audio_decoder.mp2", "audio_encoder.mp2" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB,
"audio_decoder.amrnb", "audio_encoder.amrnb" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB,
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 0b1a2af..0aeb515 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -58,7 +58,6 @@ LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/external/flac/include \
$(TOP)/external/tremolo \
- $(TOP)/frameworks/base/media/libstagefright/rtsp \
$(TOP)/external/openssl/include \
LOCAL_SHARED_LIBRARIES := \
@@ -88,7 +87,6 @@ LOCAL_STATIC_LIBRARIES := \
libvpx \
libstagefright_mpeg2ts \
libstagefright_httplive \
- libstagefright_rtsp \
libstagefright_id3 \
libFLAC \
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 1165af5..f6d054d 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -22,7 +22,6 @@
#include <dlfcn.h>
-#include "include/ARTSPController.h"
#include "include/AwesomePlayer.h"
#include "include/DRMExtractor.h"
#include "include/SoftwareRenderer.h"
@@ -53,7 +52,6 @@
#include <gui/SurfaceTextureClient.h>
#include <surfaceflinger/ISurfaceComposer.h>
-#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <cutils/properties.h>
@@ -65,7 +63,6 @@ namespace android {
static int64_t kLowWaterMarkUs = 2000000ll; // 2secs
static int64_t kHighWaterMarkUs = 5000000ll; // 5secs
-static int64_t kHighWaterMarkRTSPUs = 4000000ll; // 4secs
static const size_t kLowWaterMarkBytes = 40000;
static const size_t kHighWaterMarkBytes = 200000;
@@ -227,17 +224,18 @@ AwesomePlayer::~AwesomePlayer() {
mClient.disconnect();
}
-void AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) {
+void AwesomePlayer::cancelPlayerEvents(bool keepNotifications) {
mQueue.cancelEvent(mVideoEvent->eventID());
mVideoEventPending = false;
- mQueue.cancelEvent(mStreamDoneEvent->eventID());
- mStreamDoneEventPending = false;
- mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
- mAudioStatusEventPending = false;
mQueue.cancelEvent(mVideoLagEvent->eventID());
mVideoLagEventPending = false;
- if (!keepBufferingGoing) {
+ if (!keepNotifications) {
+ mQueue.cancelEvent(mStreamDoneEvent->eventID());
+ mStreamDoneEventPending = false;
+ mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
+ mAudioStatusEventPending = false;
+
mQueue.cancelEvent(mBufferingEvent->eventID());
mBufferingEventPending = false;
}
@@ -388,10 +386,12 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
for (size_t i = 0; i < extractor->countTracks(); ++i) {
sp<MetaData> meta = extractor->getTrackMetaData(i);
- const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
+ const char *_mime;
+ CHECK(meta->findCString(kKeyMIMEType, &_mime));
+
+ String8 mime = String8(_mime);
- if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
+ if (!haveVideo && !strncasecmp(mime.string(), "video/", 6)) {
setVideoSource(extractor->getTrack(i));
haveVideo = true;
@@ -412,9 +412,9 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
mStats.mTracks.push();
TrackStat *stat =
&mStats.mTracks.editItemAt(mStats.mVideoTrackIndex);
- stat->mMIME = mime;
+ stat->mMIME = mime.string();
}
- } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
+ } else if (!haveAudio && !strncasecmp(mime.string(), "audio/", 6)) {
setAudioSource(extractor->getTrack(i));
haveAudio = true;
@@ -424,10 +424,10 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
mStats.mTracks.push();
TrackStat *stat =
&mStats.mTracks.editItemAt(mStats.mAudioTrackIndex);
- stat->mMIME = mime;
+ stat->mMIME = mime.string();
}
- if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+ if (!strcasecmp(mime.string(), MEDIA_MIMETYPE_AUDIO_VORBIS)) {
// Only do this for vorbis audio, none of the other audio
// formats even support this ringtone specific hack and
// retrieving the metadata on some extractors may turn out
@@ -439,7 +439,7 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
modifyFlags(AUTO_LOOPING, SET);
}
}
- } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+ } else if (!strcasecmp(mime.string(), MEDIA_MIMETYPE_TEXT_3GPP)) {
addTextSource(extractor->getTrack(i));
}
}
@@ -485,9 +485,6 @@ void AwesomePlayer::reset_l() {
if (mConnectingDataSource != NULL) {
LOGI("interrupting the connection process");
mConnectingDataSource->disconnect();
- } else if (mConnectingRTSPController != NULL) {
- LOGI("interrupting the connection process");
- mConnectingRTSPController->disconnect();
}
if (mFlags & PREPARING_CONNECTED) {
@@ -534,11 +531,6 @@ void AwesomePlayer::reset_l() {
mVideoRenderer.clear();
- if (mRTSPController != NULL) {
- mRTSPController->disconnect();
- mRTSPController.clear();
- }
-
if (mVideoSource != NULL) {
shutdownVideoDecoder_l();
}
@@ -612,10 +604,7 @@ bool AwesomePlayer::getBitrate(int64_t *bitrate) {
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
int64_t bitrate;
- if (mRTSPController != NULL) {
- *durationUs = mRTSPController->getQueueDurationUs(eos);
- return true;
- } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
+ if (mCachedSource != NULL && getBitrate(&bitrate)) {
status_t finalStatus;
size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
*durationUs = cachedDataRemaining * 8000000ll / bitrate;
@@ -751,9 +740,6 @@ void AwesomePlayer::onBufferingUpdate() {
LOGV("cachedDurationUs = %.2f secs, eos=%d",
cachedDurationUs / 1E6, eos);
- int64_t highWaterMarkUs =
- (mRTSPController != NULL) ? kHighWaterMarkRTSPUs : kHighWaterMarkUs;
-
if ((mFlags & PLAYING) && !eos
&& (cachedDurationUs < kLowWaterMarkUs)) {
LOGI("cache is running low (%.2f secs) , pausing.",
@@ -763,7 +749,7 @@ void AwesomePlayer::onBufferingUpdate() {
ensureCacheIsFetching_l();
sendCacheStats();
notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
- } else if (eos || cachedDurationUs > highWaterMarkUs) {
+ } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
if (mFlags & CACHE_UNDERRUN) {
LOGI("cache has filled up (%.2f secs), resuming.",
cachedDurationUs / 1E6);
@@ -1081,7 +1067,8 @@ void AwesomePlayer::initRenderer_l() {
if (USE_SURFACE_ALLOC
&& !strncmp(component, "OMX.", 4)
- && strncmp(component, "OMX.google.", 11)) {
+ && strncmp(component, "OMX.google.", 11)
+ && strcmp(component, "OMX.Nvidia.mpeg2v.decode")) {
// Hardware decoders avoid the CPU color conversion by decoding
// directly to ANativeBuffers, so we must use a renderer that
// just pushes those buffers to the ANativeWindow.
@@ -1109,7 +1096,7 @@ status_t AwesomePlayer::pause_l(bool at_eos) {
return OK;
}
- cancelPlayerEvents(true /* keepBufferingGoing */);
+ cancelPlayerEvents(true /* keepNotifications */);
if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
if (at_eos) {
@@ -1153,18 +1140,9 @@ bool AwesomePlayer::isPlaying() const {
return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}
-status_t AwesomePlayer::setSurface(const sp<Surface> &surface) {
- Mutex::Autolock autoLock(mLock);
-
- mSurface = surface;
- return setNativeWindow_l(surface);
-}
-
status_t AwesomePlayer::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
Mutex::Autolock autoLock(mLock);
- mSurface.clear();
-
status_t err;
if (surfaceTexture != NULL) {
err = setNativeWindow_l(new SurfaceTextureClient(surfaceTexture));
@@ -1263,10 +1241,7 @@ status_t AwesomePlayer::getDuration(int64_t *durationUs) {
}
status_t AwesomePlayer::getPosition(int64_t *positionUs) {
- if (mRTSPController != NULL) {
- *positionUs = mRTSPController->getNormalPlayTimeUs();
- }
- else if (mSeeking != NO_SEEK) {
+ if (mSeeking != NO_SEEK) {
*positionUs = mSeekTimeUs;
} else if (mVideoSource != NULL
&& (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
@@ -1316,25 +1291,7 @@ status_t AwesomePlayer::setTimedTextTrackIndex(int32_t index) {
}
}
-// static
-void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
- static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
-}
-
-void AwesomePlayer::onRTSPSeekDone() {
- if (!mSeekNotificationSent) {
- notifyListener_l(MEDIA_SEEK_COMPLETE);
- mSeekNotificationSent = true;
- }
-}
-
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
- if (mRTSPController != NULL) {
- mSeekNotificationSent = false;
- mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
- return OK;
- }
-
if (mFlags & CACHE_UNDERRUN) {
modifyFlags(CACHE_UNDERRUN, CLEAR);
play_l();
@@ -1770,7 +1727,6 @@ void AwesomePlayer::onVideoEvent() {
int64_t latenessUs = nowUs - timeUs;
if (latenessUs > 500000ll
- && mRTSPController == NULL
&& mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(
&realTimeUs, &mediaTimeUs)) {
@@ -2085,34 +2041,6 @@ status_t AwesomePlayer::finishSetDataSource_l() {
return UNKNOWN_ERROR;
}
}
- } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
- if (mLooper == NULL) {
- mLooper = new ALooper;
- mLooper->setName("rtsp");
- mLooper->start();
- }
- mRTSPController = new ARTSPController(mLooper);
- mConnectingRTSPController = mRTSPController;
-
- if (mUIDValid) {
- mConnectingRTSPController->setUID(mUID);
- }
-
- mLock.unlock();
- status_t err = mRTSPController->connect(mUri.string());
- mLock.lock();
-
- mConnectingRTSPController.clear();
-
- LOGI("ARTSPController::connect returned %d", err);
-
- if (err != OK) {
- mRTSPController.clear();
- return err;
- }
-
- sp<MediaExtractor> extractor = mRTSPController.get();
- return setDataSource_l(extractor);
} else {
dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
}
@@ -2224,7 +2152,7 @@ void AwesomePlayer::onPrepareAsyncEvent() {
modifyFlags(PREPARING_CONNECTED, SET);
- if (isStreamingHTTP() || mRTSPController != NULL) {
+ if (isStreamingHTTP()) {
postBufferingEvent_l();
} else {
finishAsyncPrepare_l();
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 1ba79e5..e4de20a 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -257,6 +257,12 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
mForceRead = false;
*timestampUs =
mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+
+ // Really make sure that this video recording frame will not be dropped.
+ if (*timestampUs < mStartTimeUs) {
+ LOGI("set timestampUs to start time stamp %lld us", mStartTimeUs);
+ *timestampUs = mStartTimeUs;
+ }
return false;
}
}
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index c16b3b5..70523c1 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -20,6 +20,7 @@
#include "include/MPEG4Extractor.h"
#include "include/WAVExtractor.h"
#include "include/OggExtractor.h"
+#include "include/MPEG2PSExtractor.h"
#include "include/MPEG2TSExtractor.h"
#include "include/NuCachedSource2.h"
#include "include/HTTPBase.h"
@@ -113,6 +114,7 @@ void DataSource::RegisterDefaultSniffers() {
RegisterSniffer(SniffMP3);
RegisterSniffer(SniffAAC);
RegisterSniffer(SniffAVI);
+ RegisterSniffer(SniffMPEG2PS);
char value[PROPERTY_VALUE_MAX];
if (property_get("drm.service.enabled", value, NULL)
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 92e84c2..34e9cd7 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -25,11 +25,11 @@
#include "include/VBRISeeker.h"
#include "include/XINGSeeker.h"
+#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
@@ -289,9 +289,24 @@ MP3Extractor::MP3Extractor(
GetMPEGAudioFrameSize(
header, &frame_size, &sample_rate, &num_channels, &bitrate);
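+ // Bits 17-18 of the frame header hold the layer index (3 = Layer I,
+ // 2 = Layer II, 1 = Layer III), so "4 - value" yields the layer number.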
+ unsigned layer = 4 - ((header >> 17) & 3);
+
mMeta = new MetaData;
- mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+ switch (layer) {
+ case 1:
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I);
+ break;
+ case 2:
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II);
+ break;
+ case 3:
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+ break;
+ default:
+ TRESPASS();
+ }
+
mMeta->setInt32(kKeySampleRate, sample_rate);
mMeta->setInt32(kKeyBitRate, bitrate * 1000);
mMeta->setInt32(kKeyChannelCount, num_channels);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 1e24599..1ebf0a8 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1136,6 +1136,41 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
break;
}
+ // @xyz
+ case FOURCC('\xA9', 'x', 'y', 'z'):
+ {
+ // Best case the total data length inside "@xyz" box
+ // would be 8, for instance "@xyz" + "\x00\x04\x15\xc7" + "0+0/",
+ // where "\x00\x04" is the text string length with value = 4,
+ // "\0x15\xc7" is the language code = en, and "0+0" is a
+ // location (string) value with longitude = 0 and latitude = 0.
+ if (chunk_data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ // Worst case the location string length would be 18,
+ // for instance +90.0000-180.0000, without the trailing "/" and
+ // the string length + language code.
+ char buffer[18];
+
+ // Subtract 5 from the data size because the text string length and
+ // language code take 4 bytes, and the trailing slash "/" takes 1 byte.
+ off64_t location_length = chunk_data_size - 5;
+ if (location_length >= (off64_t) sizeof(buffer)) {
+ return ERROR_MALFORMED;
+ }
+
+ if (mDataSource->readAt(
+ data_offset + 4, buffer, location_length) < location_length) {
+ return ERROR_IO;
+ }
+
+ buffer[location_length] = '\0';
+ mFileMetaData->setCString(kKeyLocation, buffer);
+ *offset += chunk_size;
+ break;
+ }
+
case FOURCC('e', 's', 'd', 's'):
{
if (chunk_data_size < 4) {
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 01f1fba..444e823 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -30,6 +30,8 @@ const char *MEDIA_MIMETYPE_VIDEO_RAW = "video/raw";
const char *MEDIA_MIMETYPE_AUDIO_AMR_NB = "audio/3gpp";
const char *MEDIA_MIMETYPE_AUDIO_AMR_WB = "audio/amr-wb";
const char *MEDIA_MIMETYPE_AUDIO_MPEG = "audio/mpeg";
+const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I = "audio/mpeg-L1";
+const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II = "audio/mpeg-L2";
const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
const char *MEDIA_MIMETYPE_AUDIO_QCELP = "audio/qcelp";
const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
@@ -45,6 +47,7 @@ const char *MEDIA_MIMETYPE_CONTAINER_OGG = "application/ogg";
const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska";
const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
const char *MEDIA_MIMETYPE_CONTAINER_AVI = "video/avi";
+const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS = "video/mp2p";
const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index a8023df..2221268 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -24,6 +24,7 @@
#include "include/MPEG4Extractor.h"
#include "include/WAVExtractor.h"
#include "include/OggExtractor.h"
+#include "include/MPEG2PSExtractor.h"
#include "include/MPEG2TSExtractor.h"
#include "include/DRMExtractor.h"
#include "include/WVMExtractor.h"
@@ -115,6 +116,8 @@ sp<MediaExtractor> MediaExtractor::Create(
ret = new WVMExtractor(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC_ADTS)) {
ret = new AACExtractor(source);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2PS)) {
+ ret = new MPEG2PSExtractor(source);
}
if (ret != NULL) {
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index b20bfcb..dfd3f4a 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -109,6 +109,7 @@ static const CodecInfo kDecoderInfo[] = {
{ MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
{ MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.google.mp3.decoder" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, "OMX.Nvidia.mp2.decoder" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.decoder" },
@@ -1471,7 +1472,9 @@ OMXCodec::OMXCodec(
mOutputPortSettingsChangedPending(false),
mLeftOverBuffer(NULL),
mPaused(false),
- mNativeWindow(!strncmp(componentName, "OMX.google.", 11)
+ mNativeWindow(
+ (!strncmp(componentName, "OMX.google.", 11)
+ || !strcmp(componentName, "OMX.Nvidia.mpeg2v.decode"))
? NULL : nativeWindow) {
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1492,6 +1495,12 @@ void OMXCodec::setComponentRole(
static const MimeToRole kMimeToRole[] = {
{ MEDIA_MIMETYPE_AUDIO_MPEG,
"audio_decoder.mp3", "audio_encoder.mp3" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+ "audio_decoder.mp1", "audio_encoder.mp1" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+ "audio_decoder.mp2", "audio_encoder.mp2" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG,
+ "audio_decoder.mp3", "audio_encoder.mp3" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB,
"audio_decoder.amrnb", "audio_encoder.amrnb" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB,
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 571e8be..bb6e4cd 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -38,7 +38,7 @@ static bool FileHasAcceptableExtension(const char *extension) {
".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
".mkv", ".mka", ".webm", ".ts", ".fl", ".flac", ".mxmf",
- ".avi",
+ ".avi", ".mpeg", ".mpg"
};
static const size_t kNumValidExtensions =
sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index c74cb5a..4491c97 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -418,6 +418,7 @@ void StagefrightMetadataRetriever::parseMetaData() {
{ kKeyYear, METADATA_KEY_YEAR },
{ kKeyWriter, METADATA_KEY_WRITER },
{ kKeyCompilation, METADATA_KEY_COMPILATION },
+ { kKeyLocation, METADATA_KEY_LOCATION },
};
static const size_t kNumMapEntries = sizeof(kMap) / sizeof(kMap[0]);
diff --git a/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
index 94a79ab..d361ef4 100644
--- a/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
@@ -82,7 +82,11 @@ status_t AMRNBEncoder::start(MetaData *params) {
&mEncState, &mSidState, false /* dtx_enable */),
0);
- mSource->start(params);
+ status_t err = mSource->start(params);
+ if (err != OK) {
+ LOGE("AudioSource is not available");
+ return err;
+ }
mAnchorTimeUs = 0;
mNumFramesOutput = 0;
diff --git a/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp b/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
index 002f055..5eacc16 100644
--- a/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
+++ b/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
@@ -137,8 +137,12 @@ status_t AMRWBEncoder::start(MetaData *params) {
CHECK_EQ(OK, initCheck());
mNumFramesOutput = 0;
- mSource->start(params);
+ status_t err = mSource->start(params);
+ if (err != OK) {
+ LOGE("AudioSource is not available");
+ return err;
+ }
mStarted = true;
return OK;
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index ddced5f..aa07e57 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -421,8 +421,13 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
int32_t bufferSize = inHeader->nFilledLen;
+ // The PV decoder is lying to us: sometimes it'll claim to have consumed
+ // only a subset of the buffer when it clearly consumed all of it.
+ // Ignore whatever it says...
+ int32_t tmp = bufferSize;
+
if (PVDecodeVideoFrame(
- mHandle, &bitstream, &timestamp, &bufferSize,
+ mHandle, &bitstream, &timestamp, &tmp,
&useExtTimestamp,
outHeader->pBuffer) != PV_TRUE) {
LOGE("failed to decode video frame.");
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 740c957..dede3ac 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -76,7 +76,8 @@ SoftAVC::SoftAVC(
mPicId(0),
mHeadersDecoded(false),
mEOSStatus(INPUT_DATA_AVAILABLE),
- mOutputPortSettingsChange(NONE) {
+ mOutputPortSettingsChange(NONE),
+ mSignalledError(false) {
initPorts();
CHECK_EQ(initDecoder(), (status_t)OK);
}
@@ -287,7 +288,7 @@ OMX_ERRORTYPE SoftAVC::getConfig(
}
void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
- if (mOutputPortSettingsChange != NONE) {
+ if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
@@ -298,7 +299,6 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
H264SwDecRet ret = H264SWDEC_PIC_RDY;
- status_t err = OK;
bool portSettingsChanged = false;
while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
&& outQueue.size() == kNumOutputBuffers) {
@@ -372,7 +372,12 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
inPicture.dataLen = 0;
if (ret < 0) {
LOGE("Decoder failed: %d", ret);
- err = ERROR_MALFORMED;
+
+ notify(OMX_EventError, OMX_ErrorUndefined,
+ ERROR_MALFORMED, NULL);
+
+ mSignalledError = true;
+ return;
}
}
}
@@ -400,10 +405,6 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
drainOneOutputBuffer(picId, data);
}
-
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
- }
}
}
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index 1cc85e8..879b014 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -88,6 +88,8 @@ private:
};
OutputPortSettingChange mOutputPortSettingsChange;
+ bool mSignalledError;
+
void initPorts();
status_t initDecoder();
void updatePortDefinitions();
diff --git a/media/libstagefright/include/ARTSPController.h b/media/libstagefright/include/ARTSPController.h
deleted file mode 100644
index 2bd5be6..0000000
--- a/media/libstagefright/include/ARTSPController.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef A_RTSP_CONTROLLER_H_
-
-#define A_RTSP_CONTROLLER_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AHandlerReflector.h>
-#include <media/stagefright/MediaExtractor.h>
-
-namespace android {
-
-struct ALooper;
-struct MyHandler;
-
-struct ARTSPController : public MediaExtractor {
- ARTSPController(const sp<ALooper> &looper);
-
- void setUID(uid_t uid);
-
- status_t connect(const char *url);
- void disconnect();
-
- void seekAsync(int64_t timeUs, void (*seekDoneCb)(void *), void *cookie);
-
- virtual size_t countTracks();
- virtual sp<MediaSource> getTrack(size_t index);
-
- virtual sp<MetaData> getTrackMetaData(
- size_t index, uint32_t flags);
-
- int64_t getNormalPlayTimeUs();
- int64_t getQueueDurationUs(bool *eos);
-
- void onMessageReceived(const sp<AMessage> &msg);
-
- virtual uint32_t flags() const {
- // Seeking 10secs forward or backward is a very expensive operation
- // for rtsp, so let's not enable that.
- // The user can always use the seek bar.
-
- return CAN_PAUSE | CAN_SEEK;
- }
-
-protected:
- virtual ~ARTSPController();
-
-private:
- enum {
- kWhatConnectDone = 'cdon',
- kWhatDisconnectDone = 'ddon',
- kWhatSeekDone = 'sdon',
- };
-
- enum State {
- DISCONNECTED,
- CONNECTED,
- CONNECTING,
- };
-
- Mutex mLock;
- Condition mCondition;
-
- State mState;
- status_t mConnectionResult;
-
- sp<ALooper> mLooper;
- sp<MyHandler> mHandler;
- sp<AHandlerReflector<ARTSPController> > mReflector;
-
- bool mUIDValid;
- uid_t mUID;
-
- void (*mSeekDoneCb)(void *);
- void *mSeekDoneCookie;
- int64_t mLastSeekCompletedTimeUs;
-
- DISALLOW_EVIL_CONSTRUCTORS(ARTSPController);
-};
-
-} // namespace android
-
-#endif // A_RTSP_CONTROLLER_H_
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 8e73121..0985f47 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -38,9 +38,6 @@ struct MediaSource;
struct NuCachedSource2;
struct ISurfaceTexture;
-struct ALooper;
-struct ARTSPController;
-
class DrmManagerClinet;
class DecryptHandle;
@@ -84,7 +81,6 @@ struct AwesomePlayer {
bool isPlaying() const;
- status_t setSurface(const sp<Surface> &surface);
status_t setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
status_t setLooping(bool shouldLoop);
@@ -157,7 +153,6 @@ private:
bool mUIDValid;
uid_t mUID;
- sp<Surface> mSurface;
sp<ANativeWindow> mNativeWindow;
sp<MediaPlayerBase::AudioSink> mAudioSink;
@@ -233,10 +228,6 @@ private:
sp<HTTPBase> mConnectingDataSource;
sp<NuCachedSource2> mCachedSource;
- sp<ALooper> mLooper;
- sp<ARTSPController> mRTSPController;
- sp<ARTSPController> mConnectingRTSPController;
-
DrmManagerClient *mDrmManagerClient;
sp<DecryptHandle> mDecryptHandle;
@@ -259,7 +250,7 @@ private:
void notifyVideoSize_l();
void seekAudioIfNecessary_l();
- void cancelPlayerEvents(bool keepBufferingGoing = false);
+ void cancelPlayerEvents(bool keepNotifications = false);
void setAudioSource(sp<MediaSource> source);
status_t initAudioDecoder();
@@ -287,9 +278,6 @@ private:
static bool ContinuePreparation(void *cookie);
- static void OnRTSPSeekDoneWrapper(void *cookie);
- void onRTSPSeekDone();
-
bool getBitrate(int64_t *bitrate);
void finishSeekIfNecessary(int64_t videoTimeUs);
diff --git a/media/libstagefright/include/MPEG2PSExtractor.h b/media/libstagefright/include/MPEG2PSExtractor.h
new file mode 100644
index 0000000..fb76564
--- /dev/null
+++ b/media/libstagefright/include/MPEG2PSExtractor.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MPEG2_PS_EXTRACTOR_H_
+
+#define MPEG2_PS_EXTRACTOR_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <utils/threads.h>
+#include <utils/KeyedVector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+struct Track;
+struct String8;
+
+struct MPEG2PSExtractor : public MediaExtractor {
+ MPEG2PSExtractor(const sp<DataSource> &source);
+
+ virtual size_t countTracks();
+ virtual sp<MediaSource> getTrack(size_t index);
+ virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+ virtual sp<MetaData> getMetaData();
+
+ virtual uint32_t flags() const;
+
+protected:
+ virtual ~MPEG2PSExtractor();
+
+private:
+ struct Track;
+ struct WrappedTrack;
+
+ mutable Mutex mLock;
+ sp<DataSource> mDataSource;
+
+ off64_t mOffset;
+ status_t mFinalResult;
+ sp<ABuffer> mBuffer;
+ KeyedVector<unsigned, sp<Track> > mTracks;
+ bool mScanning;
+
+ bool mProgramStreamMapValid;
+ KeyedVector<unsigned, unsigned> mStreamTypeByESID;
+
+ status_t feedMore();
+
+ status_t dequeueChunk();
+ ssize_t dequeuePack();
+ ssize_t dequeueSystemHeader();
+ ssize_t dequeuePES();
+
+ DISALLOW_EVIL_CONSTRUCTORS(MPEG2PSExtractor);
+};
+
+bool SniffMPEG2PS(
+ const sp<DataSource> &source, String8 *mimeType, float *confidence,
+ sp<AMessage> *);
+
+} // namespace android
+
+#endif // MPEG2_PS_EXTRACTOR_H_
+
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 388cb54..878e534 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -64,12 +64,9 @@ struct ATSParser : public RefBase {
bool PTSTimeDeltaEstablished();
-protected:
- virtual ~ATSParser();
-
-private:
enum {
// From ISO/IEC 13818-1: 2000 (E), Table 2-29
+ STREAMTYPE_RESERVED = 0x00,
STREAMTYPE_MPEG1_VIDEO = 0x01,
STREAMTYPE_MPEG2_VIDEO = 0x02,
STREAMTYPE_MPEG1_AUDIO = 0x03,
@@ -79,6 +76,10 @@ private:
STREAMTYPE_H264 = 0x1b,
};
+protected:
+ virtual ~ATSParser();
+
+private:
struct Program;
struct Stream;
diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk
index 4a30416..578c669 100644
--- a/media/libstagefright/mpeg2ts/Android.mk
+++ b/media/libstagefright/mpeg2ts/Android.mk
@@ -6,6 +6,7 @@ LOCAL_SRC_FILES:= \
AnotherPacketSource.cpp \
ATSParser.cpp \
ESQueue.cpp \
+ MPEG2PSExtractor.cpp \
MPEG2TSExtractor.cpp \
LOCAL_C_INCLUDES:= \
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index a56da36..b9a4826 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -585,6 +585,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {
return NULL;
}
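+ // Derive the MPEG audio layer (1, 2 or 3) from bits 17-18 of the frame header.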
+ unsigned layer = 4 - ((header >> 17) & 3);
+
sp<ABuffer> accessUnit = new ABuffer(frameSize);
memcpy(accessUnit->data(), data, frameSize);
@@ -601,7 +603,24 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {
if (mFormat == NULL) {
mFormat = new MetaData;
- mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+
+ switch (layer) {
+ case 1:
+ mFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I);
+ break;
+ case 2:
+ mFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II);
+ break;
+ case 3:
+ mFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+ break;
+ default:
+ TRESPASS();
+ }
+
mFormat->setInt32(kKeySampleRate, samplingRate);
mFormat->setInt32(kKeyChannelCount, numChannels);
}
diff --git a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
new file mode 100644
index 0000000..f55be6e
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
@@ -0,0 +1,715 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MPEG2PSExtractor"
+#include <utils/Log.h>
+
+#include "include/MPEG2PSExtractor.h"
+
+#include "AnotherPacketSource.h"
+#include "ESQueue.h"
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct MPEG2PSExtractor::Track : public MediaSource {
+ Track(MPEG2PSExtractor *extractor,
+ unsigned stream_id, unsigned stream_type);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options);
+
+protected:
+ virtual ~Track();
+
+private:
+ friend struct MPEG2PSExtractor;
+
+ MPEG2PSExtractor *mExtractor;
+
+ unsigned mStreamID;
+ unsigned mStreamType;
+ ElementaryStreamQueue *mQueue;
+ sp<AnotherPacketSource> mSource;
+
+ status_t appendPESData(
+ unsigned PTS_DTS_flags,
+ uint64_t PTS, uint64_t DTS,
+ const uint8_t *data, size_t size);
+
+ DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+struct MPEG2PSExtractor::WrappedTrack : public MediaSource {
+ WrappedTrack(const sp<MPEG2PSExtractor> &extractor, const sp<Track> &track);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options);
+
+protected:
+ virtual ~WrappedTrack();
+
+private:
+ sp<MPEG2PSExtractor> mExtractor;
+ sp<MPEG2PSExtractor::Track> mTrack;
+
+ DISALLOW_EVIL_CONSTRUCTORS(WrappedTrack);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG2PSExtractor::MPEG2PSExtractor(const sp<DataSource> &source)
+ : mDataSource(source),
+ mOffset(0),
+ mFinalResult(OK),
+ mBuffer(new ABuffer(0)),
+ mScanning(true),
+ mProgramStreamMapValid(false) {
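+ // Pre-scan the beginning of the stream to discover the available tracks.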
+ for (size_t i = 0; i < 500; ++i) {
+ if (feedMore() != OK) {
+ break;
+ }
+ }
+
+ // Remove all tracks that were unable to determine their format.
+ for (size_t i = mTracks.size(); i-- > 0;) {
+ if (mTracks.valueAt(i)->getFormat() == NULL) {
+ mTracks.removeItemsAt(i);
+ }
+ }
+
+ mScanning = false;
+}
+
+MPEG2PSExtractor::~MPEG2PSExtractor() {
+}
+
+size_t MPEG2PSExtractor::countTracks() {
+ return mTracks.size();
+}
+
+sp<MediaSource> MPEG2PSExtractor::getTrack(size_t index) {
+ if (index >= mTracks.size()) {
+ return NULL;
+ }
+
+ return new WrappedTrack(this, mTracks.valueAt(index));
+}
+
+sp<MetaData> MPEG2PSExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+ if (index >= mTracks.size()) {
+ return NULL;
+ }
+
+ return mTracks.valueAt(index)->getFormat();
+}
+
+sp<MetaData> MPEG2PSExtractor::getMetaData() {
+ sp<MetaData> meta = new MetaData;
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_MPEG2PS);
+
+ return meta;
+}
+
+uint32_t MPEG2PSExtractor::flags() const {
+ return CAN_PAUSE;
+}
+
+status_t MPEG2PSExtractor::feedMore() {
+ Mutex::Autolock autoLock(mLock);
+
+ // How much data we're reading at a time
+ static const size_t kChunkSize = 8192;
+
+ for (;;) {
+ status_t err = dequeueChunk();
+
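+ // -EAGAIN means the parser needs more input: compact the buffer, grow it
+ // if necessary, and read another chunk from the data source.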
+ if (err == -EAGAIN && mFinalResult == OK) {
+ memmove(mBuffer->base(), mBuffer->data(), mBuffer->size());
+ mBuffer->setRange(0, mBuffer->size());
+
+ if (mBuffer->size() + kChunkSize > mBuffer->capacity()) {
+ size_t newCapacity = mBuffer->capacity() + kChunkSize;
+ sp<ABuffer> newBuffer = new ABuffer(newCapacity);
+ memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size());
+ newBuffer->setRange(0, mBuffer->size());
+ mBuffer = newBuffer;
+ }
+
+ ssize_t n = mDataSource->readAt(
+ mOffset, mBuffer->data() + mBuffer->size(), kChunkSize);
+
+ if (n < (ssize_t)kChunkSize) {
+ mFinalResult = (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;
+ return mFinalResult;
+ }
+
+ mBuffer->setRange(mBuffer->offset(), mBuffer->size() + n);
+ mOffset += n;
+ } else if (err != OK) {
+ mFinalResult = err;
+ return err;
+ } else {
+ return OK;
+ }
+ }
+}
+
+status_t MPEG2PSExtractor::dequeueChunk() {
+ if (mBuffer->size() < 4) {
+ return -EAGAIN;
+ }
+
+ if (memcmp("\x00\x00\x01", mBuffer->data(), 3)) {
+ return ERROR_MALFORMED;
+ }
+
+ unsigned chunkType = mBuffer->data()[3];
+
+ ssize_t res;
+
+ switch (chunkType) {
+ case 0xba:
+ {
+ res = dequeuePack();
+ break;
+ }
+
+ case 0xbb:
+ {
+ res = dequeueSystemHeader();
+ break;
+ }
+
+ default:
+ {
+ res = dequeuePES();
+ break;
+ }
+ }
+
+ if (res > 0) {
+ if (mBuffer->size() < (size_t)res) {
+ return -EAGAIN;
+ }
+
+ mBuffer->setRange(mBuffer->offset() + res, mBuffer->size() - res);
+ res = OK;
+ }
+
+ return res;
+}
+
+ssize_t MPEG2PSExtractor::dequeuePack() {
+ // 32 + 2 + 3 + 1 + 15 + 1 + 15 + 1 + 9 + 1 + 22 + 1 + 1 | +5
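+ // The fixed part of the pack_header is 14 bytes; pack_stuffing_length
+ // stuffing bytes follow it.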
+
+ if (mBuffer->size() < 14) {
+ return -EAGAIN;
+ }
+
+ unsigned pack_stuffing_length = mBuffer->data()[13] & 7;
+
+ return pack_stuffing_length + 14;
+}
+
+ssize_t MPEG2PSExtractor::dequeueSystemHeader() {
+ if (mBuffer->size() < 6) {
+ return -EAGAIN;
+ }
+
+ unsigned header_length = U16_AT(mBuffer->data() + 4);
+
+ return header_length + 6;
+}
+
+ssize_t MPEG2PSExtractor::dequeuePES() {
+ if (mBuffer->size() < 6) {
+ return -EAGAIN;
+ }
+
+ unsigned PES_packet_length = U16_AT(mBuffer->data() + 4);
+ CHECK_NE(PES_packet_length, 0u);
+
+ size_t n = PES_packet_length + 6;
+
+ if (mBuffer->size() < n) {
+ return -EAGAIN;
+ }
+
+ ABitReader br(mBuffer->data(), n);
+
+ unsigned packet_startcode_prefix = br.getBits(24);
+
+ LOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
+
+ if (packet_startcode_prefix != 1) {
+ LOGV("Supposedly payload_unit_start=1 unit does not start "
+ "with startcode.");
+
+ return ERROR_MALFORMED;
+ }
+
+ CHECK_EQ(packet_startcode_prefix, 0x000001u);
+
+ unsigned stream_id = br.getBits(8);
+ LOGV("stream_id = 0x%02x", stream_id);
+
+ /* unsigned PES_packet_length = */br.getBits(16);
+
+ if (stream_id == 0xbc) {
+ // program_stream_map
+
+ if (!mScanning) {
+ return n;
+ }
+
+ mStreamTypeByESID.clear();
+
+ /* unsigned current_next_indicator = */br.getBits(1);
+ /* unsigned reserved = */br.getBits(2);
+ /* unsigned program_stream_map_version = */br.getBits(5);
+ /* unsigned reserved = */br.getBits(7);
+ /* unsigned marker_bit = */br.getBits(1);
+ unsigned program_stream_info_length = br.getBits(16);
+
+ size_t offset = 0;
+ while (offset < program_stream_info_length) {
+ if (offset + 2 > program_stream_info_length) {
+ return ERROR_MALFORMED;
+ }
+
+ unsigned descriptor_tag = br.getBits(8);
+ unsigned descriptor_length = br.getBits(8);
+
+ LOGI("found descriptor tag 0x%02x of length %u",
+ descriptor_tag, descriptor_length);
+
+ if (offset + 2 + descriptor_length > program_stream_info_length) {
+ return ERROR_MALFORMED;
+ }
+
+ br.skipBits(8 * descriptor_length);
+
+ offset += 2 + descriptor_length;
+ }
+
+ unsigned elementary_stream_map_length = br.getBits(16);
+
+ offset = 0;
+ while (offset < elementary_stream_map_length) {
+ if (offset + 4 > elementary_stream_map_length) {
+ return ERROR_MALFORMED;
+ }
+
+ unsigned stream_type = br.getBits(8);
+ unsigned elementary_stream_id = br.getBits(8);
+
+ LOGI("elementary stream id 0x%02x has stream type 0x%02x",
+ elementary_stream_id, stream_type);
+
+ mStreamTypeByESID.add(elementary_stream_id, stream_type);
+
+ unsigned elementary_stream_info_length = br.getBits(16);
+
+ if (offset + 4 + elementary_stream_info_length
+ > elementary_stream_map_length) {
+ return ERROR_MALFORMED;
+ }
+
+ offset += 4 + elementary_stream_info_length;
+ }
+
+ /* unsigned CRC32 = */br.getBits(32);
+
+ mProgramStreamMapValid = true;
+ } else if (stream_id != 0xbe // padding_stream
+ && stream_id != 0xbf // private_stream_2
+ && stream_id != 0xf0 // ECM
+ && stream_id != 0xf1 // EMM
+ && stream_id != 0xff // program_stream_directory
+ && stream_id != 0xf2 // DSMCC
+ && stream_id != 0xf8) { // H.222.1 type E
+ CHECK_EQ(br.getBits(2), 2u);
+
+ /* unsigned PES_scrambling_control = */br.getBits(2);
+ /* unsigned PES_priority = */br.getBits(1);
+ /* unsigned data_alignment_indicator = */br.getBits(1);
+ /* unsigned copyright = */br.getBits(1);
+ /* unsigned original_or_copy = */br.getBits(1);
+
+ unsigned PTS_DTS_flags = br.getBits(2);
+ LOGV("PTS_DTS_flags = %u", PTS_DTS_flags);
+
+ unsigned ESCR_flag = br.getBits(1);
+ LOGV("ESCR_flag = %u", ESCR_flag);
+
+ unsigned ES_rate_flag = br.getBits(1);
+ LOGV("ES_rate_flag = %u", ES_rate_flag);
+
+ unsigned DSM_trick_mode_flag = br.getBits(1);
+ LOGV("DSM_trick_mode_flag = %u", DSM_trick_mode_flag);
+
+ unsigned additional_copy_info_flag = br.getBits(1);
+ LOGV("additional_copy_info_flag = %u", additional_copy_info_flag);
+
+ /* unsigned PES_CRC_flag = */br.getBits(1);
+ /* PES_extension_flag = */br.getBits(1);
+
+ unsigned PES_header_data_length = br.getBits(8);
+ LOGV("PES_header_data_length = %u", PES_header_data_length);
+
+ unsigned optional_bytes_remaining = PES_header_data_length;
+
+ uint64_t PTS = 0, DTS = 0;
+
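+ // PTS and DTS are 33-bit values, split 3/15/15 across marker-delimited fields.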
+ if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
+ CHECK_GE(optional_bytes_remaining, 5u);
+
+ CHECK_EQ(br.getBits(4), PTS_DTS_flags);
+
+ PTS = ((uint64_t)br.getBits(3)) << 30;
+ CHECK_EQ(br.getBits(1), 1u);
+ PTS |= ((uint64_t)br.getBits(15)) << 15;
+ CHECK_EQ(br.getBits(1), 1u);
+ PTS |= br.getBits(15);
+ CHECK_EQ(br.getBits(1), 1u);
+
+ LOGV("PTS = %llu", PTS);
+ // LOGI("PTS = %.2f secs", PTS / 90000.0f);
+
+ optional_bytes_remaining -= 5;
+
+ if (PTS_DTS_flags == 3) {
+ CHECK_GE(optional_bytes_remaining, 5u);
+
+ CHECK_EQ(br.getBits(4), 1u);
+
+ DTS = ((uint64_t)br.getBits(3)) << 30;
+ CHECK_EQ(br.getBits(1), 1u);
+ DTS |= ((uint64_t)br.getBits(15)) << 15;
+ CHECK_EQ(br.getBits(1), 1u);
+ DTS |= br.getBits(15);
+ CHECK_EQ(br.getBits(1), 1u);
+
+ LOGV("DTS = %llu", DTS);
+
+ optional_bytes_remaining -= 5;
+ }
+ }
+
+ if (ESCR_flag) {
+ CHECK_GE(optional_bytes_remaining, 6u);
+
+ br.getBits(2);
+
+ uint64_t ESCR = ((uint64_t)br.getBits(3)) << 30;
+ CHECK_EQ(br.getBits(1), 1u);
+ ESCR |= ((uint64_t)br.getBits(15)) << 15;
+ CHECK_EQ(br.getBits(1), 1u);
+ ESCR |= br.getBits(15);
+ CHECK_EQ(br.getBits(1), 1u);
+
+ LOGV("ESCR = %llu", ESCR);
+ /* unsigned ESCR_extension = */br.getBits(9);
+
+ CHECK_EQ(br.getBits(1), 1u);
+
+ optional_bytes_remaining -= 6;
+ }
+
+ if (ES_rate_flag) {
+ CHECK_GE(optional_bytes_remaining, 3u);
+
+ CHECK_EQ(br.getBits(1), 1u);
+ /* unsigned ES_rate = */br.getBits(22);
+ CHECK_EQ(br.getBits(1), 1u);
+
+ optional_bytes_remaining -= 3;
+ }
+
+ br.skipBits(optional_bytes_remaining * 8);
+
+ // ES data follows.
+
+ CHECK_GE(PES_packet_length, PES_header_data_length + 3);
+
+ unsigned dataLength =
+ PES_packet_length - 3 - PES_header_data_length;
+
+ if (br.numBitsLeft() < dataLength * 8) {
+ LOGE("PES packet does not carry enough data to contain "
+ "payload. (numBitsLeft = %d, required = %d)",
+ br.numBitsLeft(), dataLength * 8);
+
+ return ERROR_MALFORMED;
+ }
+
+ CHECK_GE(br.numBitsLeft(), dataLength * 8);
+
+ ssize_t index = mTracks.indexOfKey(stream_id);
+ if (index < 0 && mScanning) {
+ unsigned streamType;
+
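+ // Prefer the stream_type announced in the program stream map; otherwise
+ // infer it from the stream_id range.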
+ ssize_t streamTypeIndex;
+ if (mProgramStreamMapValid
+ && (streamTypeIndex =
+ mStreamTypeByESID.indexOfKey(stream_id)) >= 0) {
+ streamType = mStreamTypeByESID.valueAt(streamTypeIndex);
+ } else if ((stream_id & ~0x1f) == 0xc0) {
+ // ISO/IEC 13818-3 or ISO/IEC 11172-3 or ISO/IEC 13818-7
+ // or ISO/IEC 14496-3 audio
+ streamType = ATSParser::STREAMTYPE_MPEG2_AUDIO;
+ } else if ((stream_id & ~0x0f) == 0xe0) {
+ // ISO/IEC 13818-2 or ISO/IEC 11172-2 or ISO/IEC 14496-2 video
+ streamType = ATSParser::STREAMTYPE_MPEG2_VIDEO;
+ } else {
+ streamType = ATSParser::STREAMTYPE_RESERVED;
+ }
+
+ index = mTracks.add(
+ stream_id, new Track(this, stream_id, streamType));
+ }
+
+ status_t err = OK;
+
+ if (index >= 0) {
+ err =
+ mTracks.editValueAt(index)->appendPESData(
+ PTS_DTS_flags, PTS, DTS, br.data(), dataLength);
+ }
+
+ br.skipBits(dataLength * 8);
+
+ if (err != OK) {
+ return err;
+ }
+ } else if (stream_id == 0xbe) { // padding_stream
+ CHECK_NE(PES_packet_length, 0u);
+ br.skipBits(PES_packet_length * 8);
+ } else {
+ CHECK_NE(PES_packet_length, 0u);
+ br.skipBits(PES_packet_length * 8);
+ }
+
+ return n;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG2PSExtractor::Track::Track(
+ MPEG2PSExtractor *extractor, unsigned stream_id, unsigned stream_type)
+ : mExtractor(extractor),
+ mStreamID(stream_id),
+ mStreamType(stream_type),
+ mQueue(NULL) {
+ bool supported = true;
+ ElementaryStreamQueue::Mode mode;
+
+ switch (mStreamType) {
+ case ATSParser::STREAMTYPE_H264:
+ mode = ElementaryStreamQueue::H264;
+ break;
+ case ATSParser::STREAMTYPE_MPEG2_AUDIO_ATDS:
+ mode = ElementaryStreamQueue::AAC;
+ break;
+ case ATSParser::STREAMTYPE_MPEG1_AUDIO:
+ case ATSParser::STREAMTYPE_MPEG2_AUDIO:
+ mode = ElementaryStreamQueue::MPEG_AUDIO;
+ break;
+
+ case ATSParser::STREAMTYPE_MPEG1_VIDEO:
+ case ATSParser::STREAMTYPE_MPEG2_VIDEO:
+ mode = ElementaryStreamQueue::MPEG_VIDEO;
+ break;
+
+ case ATSParser::STREAMTYPE_MPEG4_VIDEO:
+ mode = ElementaryStreamQueue::MPEG4_VIDEO;
+ break;
+
+ default:
+ supported = false;
+ break;
+ }
+
+ if (supported) {
+ mQueue = new ElementaryStreamQueue(mode);
+ } else {
+ LOGI("unsupported stream ID 0x%02x", stream_id);
+ }
+}
+
+MPEG2PSExtractor::Track::~Track() {
+ delete mQueue;
+ mQueue = NULL;
+}
+
+status_t MPEG2PSExtractor::Track::start(MetaData *params) {
+ if (mSource == NULL) {
+ return NO_INIT;
+ }
+
+ return mSource->start(params);
+}
+
+status_t MPEG2PSExtractor::Track::stop() {
+ if (mSource == NULL) {
+ return NO_INIT;
+ }
+
+ return mSource->stop();
+}
+
+sp<MetaData> MPEG2PSExtractor::Track::getFormat() {
+ if (mSource == NULL) {
+ return NULL;
+ }
+
+ return mSource->getFormat();
+}
+
+status_t MPEG2PSExtractor::Track::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ if (mSource == NULL) {
+ return NO_INIT;
+ }
+
+ status_t finalResult;
+ while (!mSource->hasBufferAvailable(&finalResult)) {
+ if (finalResult != OK) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ status_t err = mExtractor->feedMore();
+
+ if (err != OK) {
+ mSource->signalEOS(err);
+ }
+ }
+
+ return mSource->read(buffer, options);
+}
+
+status_t MPEG2PSExtractor::Track::appendPESData(
+ unsigned PTS_DTS_flags,
+ uint64_t PTS, uint64_t DTS,
+ const uint8_t *data, size_t size) {
+ if (mQueue == NULL) {
+ return OK;
+ }
+
+ int64_t timeUs;
+ if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
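+ // The PTS is expressed in 90 kHz clock ticks; convert it to microseconds.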
+ timeUs = (PTS * 100) / 9;
+ } else {
+ timeUs = 0;
+ }
+
+ status_t err = mQueue->appendData(data, size, timeUs);
+
+ if (err != OK) {
+ return err;
+ }
+
+ sp<ABuffer> accessUnit;
+ while ((accessUnit = mQueue->dequeueAccessUnit()) != NULL) {
+ if (mSource == NULL) {
+ sp<MetaData> meta = mQueue->getFormat();
+
+ if (meta != NULL) {
+ LOGV("Stream ID 0x%02x now has data.", mStreamID);
+
+ mSource = new AnotherPacketSource(meta);
+ mSource->queueAccessUnit(accessUnit);
+ }
+ } else if (mQueue->getFormat() != NULL) {
+ mSource->queueAccessUnit(accessUnit);
+ }
+ }
+
+ return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG2PSExtractor::WrappedTrack::WrappedTrack(
+ const sp<MPEG2PSExtractor> &extractor, const sp<Track> &track)
+ : mExtractor(extractor),
+ mTrack(track) {
+}
+
+MPEG2PSExtractor::WrappedTrack::~WrappedTrack() {
+}
+
+status_t MPEG2PSExtractor::WrappedTrack::start(MetaData *params) {
+ return mTrack->start(params);
+}
+
+status_t MPEG2PSExtractor::WrappedTrack::stop() {
+ return mTrack->stop();
+}
+
+sp<MetaData> MPEG2PSExtractor::WrappedTrack::getFormat() {
+ return mTrack->getFormat();
+}
+
+status_t MPEG2PSExtractor::WrappedTrack::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ return mTrack->read(buffer, options);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+bool SniffMPEG2PS(
+ const sp<DataSource> &source, String8 *mimeType, float *confidence,
+ sp<AMessage> *) {
+ uint8_t header[5];
+ if (source->readAt(0, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+ return false;
+ }
+
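+ // An MPEG-2 program stream begins with a pack start code (00 00 01 BA)
+ // whose next byte starts with the '01' marker bits.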
+ if (memcmp("\x00\x00\x01\xba", header, 4) || (header[4] >> 6) != 1) {
+ return false;
+ }
+
+ *confidence = 0.25f; // Slightly larger than .mp3 extractor's confidence
+
+ mimeType->setTo(MEDIA_MIMETYPE_CONTAINER_MPEG2PS);
+
+ return true;
+}
+
+} // namespace android
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 4ecb92f..3f4cdb5 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -34,8 +34,8 @@
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <utils/Vector.h>
@@ -402,43 +402,15 @@ static sp<ABuffer> MakeMPEG4VideoCodecSpecificData(
return csd;
}
-static bool GetClockRate(const AString &desc, uint32_t *clockRate) {
- ssize_t slashPos = desc.find("/");
- if (slashPos < 0) {
- return false;
- }
-
- const char *s = desc.c_str() + slashPos + 1;
-
- char *end;
- unsigned long x = strtoul(s, &end, 10);
-
- if (end == s || (*end != '\0' && *end != '/')) {
- return false;
- }
-
- *clockRate = x;
-
- return true;
-}
-
APacketSource::APacketSource(
const sp<ASessionDescription> &sessionDesc, size_t index)
: mInitCheck(NO_INIT),
- mFormat(new MetaData),
- mEOSResult(OK),
- mIsAVC(false),
- mScanForIDR(true),
- mRTPTimeBase(0),
- mNormalPlayTimeBaseUs(0),
- mLastNormalPlayTimeUs(0) {
+ mFormat(new MetaData) {
unsigned long PT;
AString desc;
AString params;
sessionDesc->getFormatType(index, &PT, &desc, &params);
- CHECK(GetClockRate(desc, &mClockRate));
-
int64_t durationUs;
if (sessionDesc->getDurationUs(&durationUs)) {
mFormat->setInt64(kKeyDuration, durationUs);
@@ -448,8 +420,6 @@ APacketSource::APacketSource(
mInitCheck = OK;
if (!strncmp(desc.c_str(), "H264/", 5)) {
- mIsAVC = true;
-
mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
int32_t width, height;
@@ -602,137 +572,8 @@ status_t APacketSource::initCheck() const {
return mInitCheck;
}
-status_t APacketSource::start(MetaData *params) {
- return OK;
-}
-
-status_t APacketSource::stop() {
- return OK;
-}
-
sp<MetaData> APacketSource::getFormat() {
return mFormat;
}
-status_t APacketSource::read(
- MediaBuffer **out, const ReadOptions *) {
- *out = NULL;
-
- Mutex::Autolock autoLock(mLock);
- while (mEOSResult == OK && mBuffers.empty()) {
- mCondition.wait(mLock);
- }
-
- if (!mBuffers.empty()) {
- const sp<ABuffer> buffer = *mBuffers.begin();
-
- updateNormalPlayTime_l(buffer);
-
- int64_t timeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
- MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
- mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
-
- *out = mediaBuffer;
-
- mBuffers.erase(mBuffers.begin());
- return OK;
- }
-
- return mEOSResult;
-}
-
-void APacketSource::updateNormalPlayTime_l(const sp<ABuffer> &buffer) {
- uint32_t rtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-
- mLastNormalPlayTimeUs =
- (((double)rtpTime - (double)mRTPTimeBase) / mClockRate)
- * 1000000ll
- + mNormalPlayTimeBaseUs;
-}
-
-void APacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
- int32_t damaged;
- if (buffer->meta()->findInt32("damaged", &damaged) && damaged) {
- LOGV("discarding damaged AU");
- return;
- }
-
- if (mScanForIDR && mIsAVC) {
- // This pretty piece of code ensures that the first access unit
- // fed to the decoder after stream-start or seek is guaranteed to
- // be an IDR frame. This is to workaround limitations of a certain
- // hardware h.264 decoder that requires this to be the case.
-
- if (!IsIDR(buffer)) {
- LOGV("skipping AU while scanning for next IDR frame.");
- return;
- }
-
- mScanForIDR = false;
- }
-
- Mutex::Autolock autoLock(mLock);
- mBuffers.push_back(buffer);
- mCondition.signal();
-}
-
-void APacketSource::signalEOS(status_t result) {
- CHECK(result != OK);
-
- Mutex::Autolock autoLock(mLock);
- mEOSResult = result;
- mCondition.signal();
-}
-
-void APacketSource::flushQueue() {
- Mutex::Autolock autoLock(mLock);
- mBuffers.clear();
-
- mScanForIDR = true;
-}
-
-int64_t APacketSource::getNormalPlayTimeUs() {
- Mutex::Autolock autoLock(mLock);
- return mLastNormalPlayTimeUs;
-}
-
-void APacketSource::setNormalPlayTimeMapping(
- uint32_t rtpTime, int64_t normalPlayTimeUs) {
- Mutex::Autolock autoLock(mLock);
-
- mRTPTimeBase = rtpTime;
- mNormalPlayTimeBaseUs = normalPlayTimeUs;
-}
-
-int64_t APacketSource::getQueueDurationUs(bool *eos) {
- Mutex::Autolock autoLock(mLock);
-
- *eos = (mEOSResult != OK);
-
- if (mBuffers.size() < 2) {
- return 0;
- }
-
- const sp<ABuffer> first = *mBuffers.begin();
- const sp<ABuffer> last = *--mBuffers.end();
-
- int64_t firstTimeUs;
- CHECK(first->meta()->findInt64("timeUs", &firstTimeUs));
-
- int64_t lastTimeUs;
- CHECK(last->meta()->findInt64("timeUs", &lastTimeUs));
-
- if (lastTimeUs < firstTimeUs) {
- LOGE("Huh? Time moving backwards? %lld > %lld",
- firstTimeUs, lastTimeUs);
-
- return 0;
- }
-
- return lastTimeUs - firstTimeUs;
-}
-
} // namespace android
diff --git a/media/libstagefright/rtsp/APacketSource.h b/media/libstagefright/rtsp/APacketSource.h
index 7a77fc6..530e537 100644
--- a/media/libstagefright/rtsp/APacketSource.h
+++ b/media/libstagefright/rtsp/APacketSource.h
@@ -19,63 +19,27 @@
#define A_PACKET_SOURCE_H_
#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
-#include <utils/threads.h>
-#include <utils/List.h>
+#include <media/stagefright/MetaData.h>
+#include <utils/RefBase.h>
namespace android {
-struct ABuffer;
struct ASessionDescription;
-struct APacketSource : public MediaSource {
+struct APacketSource : public RefBase {
APacketSource(const sp<ASessionDescription> &sessionDesc, size_t index);
status_t initCheck() const;
- virtual status_t start(MetaData *params = NULL);
- virtual status_t stop();
virtual sp<MetaData> getFormat();
- virtual status_t read(
- MediaBuffer **buffer, const ReadOptions *options = NULL);
-
- void queueAccessUnit(const sp<ABuffer> &buffer);
- void signalEOS(status_t result);
-
- void flushQueue();
-
- int64_t getNormalPlayTimeUs();
-
- void setNormalPlayTimeMapping(
- uint32_t rtpTime, int64_t normalPlayTimeUs);
-
- int64_t getQueueDurationUs(bool *eos);
-
protected:
virtual ~APacketSource();
private:
status_t mInitCheck;
- Mutex mLock;
- Condition mCondition;
-
sp<MetaData> mFormat;
- List<sp<ABuffer> > mBuffers;
- status_t mEOSResult;
-
- bool mIsAVC;
- bool mScanForIDR;
-
- uint32_t mClockRate;
-
- uint32_t mRTPTimeBase;
- int64_t mNormalPlayTimeBaseUs;
-
- int64_t mLastNormalPlayTimeUs;
-
- void updateNormalPlayTime_l(const sp<ABuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(APacketSource);
};
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 47de4e0..cd374e2 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -220,7 +220,7 @@ void ARTPConnection::onRemoveStream(const sp<AMessage> &msg) {
}
if (it == mStreams.end()) {
- TRESPASS();
+ return;
}
mStreams.erase(it);
@@ -274,41 +274,52 @@ void ARTPConnection::onPollStreams() {
}
int res = select(maxSocket + 1, &rs, NULL, NULL, &tv);
- CHECK_GE(res, 0);
if (res > 0) {
- for (List<StreamInfo>::iterator it = mStreams.begin();
- it != mStreams.end(); ++it) {
+ List<StreamInfo>::iterator it = mStreams.begin();
+ while (it != mStreams.end()) {
if ((*it).mIsInjected) {
+ ++it;
continue;
}
+ status_t err = OK;
if (FD_ISSET(it->mRTPSocket, &rs)) {
- receive(&*it, true);
+ err = receive(&*it, true);
}
- if (FD_ISSET(it->mRTCPSocket, &rs)) {
- receive(&*it, false);
+ if (err == OK && FD_ISSET(it->mRTCPSocket, &rs)) {
+ err = receive(&*it, false);
}
+
+ if (err == -ECONNRESET) {
+ // socket failure, this stream is dead, Jim.
+
+ LOGW("failed to receive RTP/RTCP datagram.");
+ it = mStreams.erase(it);
+ continue;
+ }
+
+ ++it;
}
}
- postPollEvent();
-
int64_t nowUs = ALooper::GetNowUs();
if (mLastReceiverReportTimeUs <= 0
|| mLastReceiverReportTimeUs + 5000000ll <= nowUs) {
sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
- for (List<StreamInfo>::iterator it = mStreams.begin();
- it != mStreams.end(); ++it) {
+ List<StreamInfo>::iterator it = mStreams.begin();
+ while (it != mStreams.end()) {
StreamInfo *s = &*it;
if (s->mIsInjected) {
+ ++it;
continue;
}
if (s->mNumRTCPPacketsReceived == 0) {
// We have never received any RTCP packets on this stream, so
// we don't even know where to send a report.
+ ++it;
continue;
}
@@ -327,16 +338,34 @@ void ARTPConnection::onPollStreams() {
if (buffer->size() > 0) {
LOGV("Sending RR...");
- ssize_t n = sendto(
+ ssize_t n;
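+ // Retry the send if it is interrupted by a signal (EINTR).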
+ do {
+ n = sendto(
s->mRTCPSocket, buffer->data(), buffer->size(), 0,
(const struct sockaddr *)&s->mRemoteRTCPAddr,
sizeof(s->mRemoteRTCPAddr));
+ } while (n < 0 && errno == EINTR);
+
+ if (n <= 0) {
+ LOGW("failed to send RTCP receiver report (%s).",
+ n == 0 ? "connection gone" : strerror(errno));
+
+ it = mStreams.erase(it);
+ continue;
+ }
+
CHECK_EQ(n, (ssize_t)buffer->size());
mLastReceiverReportTimeUs = nowUs;
}
+
+ ++it;
}
}
+
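+ // Keep polling only while live streams remain; dead streams were erased above.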
+ if (!mStreams.empty()) {
+ postPollEvent();
+ }
}
status_t ARTPConnection::receive(StreamInfo *s, bool receiveRTP) {
@@ -350,16 +379,19 @@ status_t ARTPConnection::receive(StreamInfo *s, bool receiveRTP) {
(!receiveRTP && s->mNumRTCPPacketsReceived == 0)
? sizeof(s->mRemoteRTCPAddr) : 0;
- ssize_t nbytes = recvfrom(
+ ssize_t nbytes;
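+ // Retry the receive if it is interrupted by a signal (EINTR).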
+ do {
+ nbytes = recvfrom(
receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
buffer->data(),
buffer->capacity(),
0,
remoteAddrLen > 0 ? (struct sockaddr *)&s->mRemoteRTCPAddr : NULL,
remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+ } while (nbytes < 0 && errno == EINTR);
- if (nbytes < 0) {
- return -1;
+ if (nbytes <= 0) {
+ return -ECONNRESET;
}
buffer->setRange(0, nbytes);
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index bd0e491..4f0363b 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -187,10 +187,13 @@ bool ARTSPConnection::ParseURL(
return true;
}
-static void MakeSocketBlocking(int s, bool blocking) {
+static status_t MakeSocketBlocking(int s, bool blocking) {
-// Make socket non-blocking.
+// Make the socket blocking or non-blocking, as requested.
int flags = fcntl(s, F_GETFL, 0);
- CHECK_NE(flags, -1);
+
+ if (flags == -1) {
+ return UNKNOWN_ERROR;
+ }
if (blocking) {
flags &= ~O_NONBLOCK;
@@ -198,7 +201,9 @@ static void MakeSocketBlocking(int s, bool blocking) {
flags |= O_NONBLOCK;
}
- CHECK_NE(fcntl(s, F_SETFL, flags), -1);
+ flags = fcntl(s, F_SETFL, flags);
+
+ return flags == -1 ? UNKNOWN_ERROR : OK;
}
void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
@@ -302,27 +307,32 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
reply->post();
}
+void ARTSPConnection::performDisconnect() {
+ if (mUIDValid) {
+ HTTPBase::UnRegisterSocketUserTag(mSocket);
+ }
+ close(mSocket);
+ mSocket = -1;
+
+ flushPendingRequests();
+
+ mUser.clear();
+ mPass.clear();
+ mAuthType = NONE;
+ mNonce.clear();
+
+ mState = DISCONNECTED;
+}
+
void ARTSPConnection::onDisconnect(const sp<AMessage> &msg) {
if (mState == CONNECTED || mState == CONNECTING) {
- if (mUIDValid) {
- HTTPBase::UnRegisterSocketUserTag(mSocket);
- }
- close(mSocket);
- mSocket = -1;
-
- flushPendingRequests();
+ performDisconnect();
}
sp<AMessage> reply;
CHECK(msg->findMessage("reply", &reply));
reply->setInt32("result", OK);
- mState = DISCONNECTED;
-
- mUser.clear();
- mPass.clear();
- mAuthType = NONE;
- mNonce.clear();
reply->post();
}
@@ -427,21 +437,25 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
send(mSocket, request.c_str() + numBytesSent,
request.size() - numBytesSent, 0);
- if (n == 0) {
- // Server closed the connection.
- LOGE("Server unexpectedly closed the connection.");
+ if (n < 0 && errno == EINTR) {
+ continue;
+ }
- reply->setInt32("result", ERROR_IO);
- reply->post();
- return;
- } else if (n < 0) {
- if (errno == EINTR) {
- continue;
+ if (n <= 0) {
+ performDisconnect();
+
+ if (n == 0) {
+ // Server closed the connection.
+ LOGE("Server unexpectedly closed the connection.");
+
+ reply->setInt32("result", ERROR_IO);
+ reply->post();
+ } else {
+ LOGE("Error sending rtsp request. (%s)", strerror(errno));
+ reply->setInt32("result", -errno);
+ reply->post();
}
- LOGE("Error sending rtsp request.");
- reply->setInt32("result", -errno);
- reply->post();
return;
}
@@ -512,17 +526,22 @@ status_t ARTSPConnection::receive(void *data, size_t size) {
size_t offset = 0;
while (offset < size) {
ssize_t n = recv(mSocket, (uint8_t *)data + offset, size - offset, 0);
- if (n == 0) {
- // Server closed the connection.
- LOGE("Server unexpectedly closed the connection.");
- return ERROR_IO;
- } else if (n < 0) {
- if (errno == EINTR) {
- continue;
- }
- LOGE("Error reading rtsp response.");
- return -errno;
+ if (n < 0 && errno == EINTR) {
+ continue;
+ }
+
+ if (n <= 0) {
+ performDisconnect();
+
+ if (n == 0) {
+ // Server closed the connection.
+ LOGE("Server unexpectedly closed the connection.");
+ return ERROR_IO;
+ } else {
+ LOGE("Error reading rtsp response. (%s)", strerror(errno));
+ return -errno;
+ }
}
offset += (size_t)n;
@@ -681,24 +700,8 @@ bool ARTSPConnection::receiveRTSPReponse() {
if (contentLength > 0) {
response->mContent = new ABuffer(contentLength);
- size_t numBytesRead = 0;
- while (numBytesRead < contentLength) {
- ssize_t n = recv(
- mSocket, response->mContent->data() + numBytesRead,
- contentLength - numBytesRead, 0);
-
- if (n == 0) {
- // Server closed the connection.
- TRESPASS();
- } else if (n < 0) {
- if (errno == EINTR) {
- continue;
- }
-
- TRESPASS();
- }
-
- numBytesRead += (size_t)n;
+ if (receive(response->mContent->data(), contentLength) != OK) {
+ return false;
}
}
@@ -765,17 +768,20 @@ bool ARTSPConnection::handleServerRequest(const sp<ARTSPResponse> &request) {
send(mSocket, response.c_str() + numBytesSent,
response.size() - numBytesSent, 0);
- if (n == 0) {
- // Server closed the connection.
- LOGE("Server unexpectedly closed the connection.");
+ if (n < 0 && errno == EINTR) {
+ continue;
+ }
- return false;
- } else if (n < 0) {
- if (errno == EINTR) {
- continue;
+ if (n <= 0) {
+ if (n == 0) {
+ // Server closed the connection.
+ LOGE("Server unexpectedly closed the connection.");
+ } else {
+ LOGE("Error sending rtsp response (%s).", strerror(errno));
}
- LOGE("Error sending rtsp response.");
+ performDisconnect();
+
return false;
}
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 5cb84fd..68f2d59 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -91,6 +91,8 @@ private:
AString mUserAgent;
+ void performDisconnect();
+
void onConnect(const sp<AMessage> &msg);
void onDisconnect(const sp<AMessage> &msg);
void onCompleteConnection(const sp<AMessage> &msg);
diff --git a/media/libstagefright/rtsp/ARTSPController.cpp b/media/libstagefright/rtsp/ARTSPController.cpp
deleted file mode 100644
index 2ebae7e..0000000
--- a/media/libstagefright/rtsp/ARTSPController.cpp
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ARTSPController.h"
-
-#include "MyHandler.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-ARTSPController::ARTSPController(const sp<ALooper> &looper)
- : mState(DISCONNECTED),
- mLooper(looper),
- mUIDValid(false),
- mSeekDoneCb(NULL),
- mSeekDoneCookie(NULL),
- mLastSeekCompletedTimeUs(-1) {
- mReflector = new AHandlerReflector<ARTSPController>(this);
- looper->registerHandler(mReflector);
-}
-
-ARTSPController::~ARTSPController() {
- CHECK_EQ((int)mState, (int)DISCONNECTED);
- mLooper->unregisterHandler(mReflector->id());
-}
-
-void ARTSPController::setUID(uid_t uid) {
- mUIDValid = true;
- mUID = uid;
-}
-
-status_t ARTSPController::connect(const char *url) {
- Mutex::Autolock autoLock(mLock);
-
- if (mState != DISCONNECTED) {
- return ERROR_ALREADY_CONNECTED;
- }
-
- sp<AMessage> msg = new AMessage(kWhatConnectDone, mReflector->id());
-
- mHandler = new MyHandler(url, mLooper, mUIDValid, mUID);
-
- mState = CONNECTING;
-
- mHandler->connect(msg);
-
- while (mState == CONNECTING) {
- mCondition.wait(mLock);
- }
-
- if (mState != CONNECTED) {
- mHandler.clear();
- }
-
- return mConnectionResult;
-}
-
-void ARTSPController::disconnect() {
- Mutex::Autolock autoLock(mLock);
-
- if (mState == CONNECTING) {
- mState = DISCONNECTED;
- mConnectionResult = ERROR_IO;
- mCondition.broadcast();
-
- mHandler.clear();
- return;
- } else if (mState != CONNECTED) {
- return;
- }
-
- sp<AMessage> msg = new AMessage(kWhatDisconnectDone, mReflector->id());
- mHandler->disconnect(msg);
-
- while (mState == CONNECTED) {
- mCondition.wait(mLock);
- }
-
- mHandler.clear();
-}
-
-void ARTSPController::seekAsync(
- int64_t timeUs,
- void (*seekDoneCb)(void *), void *cookie) {
- Mutex::Autolock autoLock(mLock);
-
- CHECK(seekDoneCb != NULL);
- CHECK(mSeekDoneCb == NULL);
-
- // Ignore seek requests that are too soon after the previous one has
- // completed, we don't want to swamp the server.
-
- bool tooEarly =
- mLastSeekCompletedTimeUs >= 0
- && ALooper::GetNowUs() < mLastSeekCompletedTimeUs + 500000ll;
-
- if (mState != CONNECTED || tooEarly) {
- (*seekDoneCb)(cookie);
- return;
- }
-
- mSeekDoneCb = seekDoneCb;
- mSeekDoneCookie = cookie;
-
- sp<AMessage> msg = new AMessage(kWhatSeekDone, mReflector->id());
- mHandler->seek(timeUs, msg);
-}
-
-size_t ARTSPController::countTracks() {
- if (mHandler == NULL) {
- return 0;
- }
-
- return mHandler->countTracks();
-}
-
-sp<MediaSource> ARTSPController::getTrack(size_t index) {
- CHECK(mHandler != NULL);
-
- return mHandler->getPacketSource(index);
-}
-
-sp<MetaData> ARTSPController::getTrackMetaData(
- size_t index, uint32_t flags) {
- CHECK(mHandler != NULL);
-
- return mHandler->getPacketSource(index)->getFormat();
-}
-
-void ARTSPController::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatConnectDone:
- {
- Mutex::Autolock autoLock(mLock);
-
- CHECK(msg->findInt32("result", &mConnectionResult));
- mState = (mConnectionResult == OK) ? CONNECTED : DISCONNECTED;
-
- mCondition.signal();
- break;
- }
-
- case kWhatDisconnectDone:
- {
- Mutex::Autolock autoLock(mLock);
- mState = DISCONNECTED;
- mCondition.signal();
- break;
- }
-
- case kWhatSeekDone:
- {
- LOGI("seek done");
-
- mLastSeekCompletedTimeUs = ALooper::GetNowUs();
-
- void (*seekDoneCb)(void *) = mSeekDoneCb;
- mSeekDoneCb = NULL;
-
- (*seekDoneCb)(mSeekDoneCookie);
- break;
- }
-
- default:
- TRESPASS();
- break;
- }
-}
-
-int64_t ARTSPController::getNormalPlayTimeUs() {
- CHECK(mHandler != NULL);
- return mHandler->getNormalPlayTimeUs();
-}
-
-int64_t ARTSPController::getQueueDurationUs(bool *eos) {
- *eos = true;
-
- int64_t minQueuedDurationUs = 0;
- for (size_t i = 0; i < mHandler->countTracks(); ++i) {
- sp<APacketSource> source = mHandler->getPacketSource(i);
-
- bool newEOS;
- int64_t queuedDurationUs = source->getQueueDurationUs(&newEOS);
-
- if (!newEOS) {
- *eos = false;
- }
-
- if (i == 0 || queuedDurationUs < minQueuedDurationUs) {
- minQueuedDurationUs = queuedDurationUs;
- }
- }
-
- return minQueuedDurationUs;
-}
-
-} // namespace android
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 8530ff3..8230347 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -15,7 +15,6 @@ LOCAL_SRC_FILES:= \
ARTPSource.cpp \
ARTPWriter.cpp \
ARTSPConnection.cpp \
- ARTSPController.cpp \
ASessionDescription.cpp \
LOCAL_C_INCLUDES:= \
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 8128813..794c60b 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -44,12 +44,14 @@
-// If no access units are received within 5 secs, assume that the rtp
+// If no access units are received within 10 secs, assume that the rtp
// stream has ended and signal end of stream.
-static int64_t kAccessUnitTimeoutUs = 5000000ll;
+static int64_t kAccessUnitTimeoutUs = 10000000ll;
// If no access units arrive for the first 10 secs after starting the
// stream, assume none ever will and signal EOS or switch transports.
static int64_t kStartupTimeoutUs = 10000000ll;
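+// Keep-alive interval used when the server doesn't advertise a session timeout (60 secs, the RTSP default).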
+static int64_t kDefaultKeepAliveTimeoutUs = 60000000ll;
+
namespace android {
static void MakeUserAgentString(AString *s) {
@@ -94,12 +96,24 @@ static bool GetAttribute(const char *s, const char *key, AString *value) {
}
struct MyHandler : public AHandler {
+ enum {
+ kWhatConnected = 'conn',
+ kWhatDisconnected = 'disc',
+ kWhatSeekDone = 'sdon',
+
+ kWhatAccessUnit = 'accU',
+ kWhatEOS = 'eos!',
+ kWhatSeekDiscontinuity = 'seeD',
+ kWhatNormalPlayTimeMapping = 'nptM',
+ };
+
MyHandler(
- const char *url, const sp<ALooper> &looper,
+ const char *url,
+ const sp<AMessage> &notify,
bool uidValid = false, uid_t uid = 0)
- : mUIDValid(uidValid),
+ : mNotify(notify),
+ mUIDValid(uidValid),
mUID(uid),
- mLooper(looper),
mNetLooper(new ALooper),
mConn(new ARTSPConnection(mUIDValid, mUID)),
mRTPConn(new ARTPConnection),
@@ -118,7 +132,9 @@ struct MyHandler : public AHandler {
mTryFakeRTCP(false),
mReceivedFirstRTCPPacket(false),
mReceivedFirstRTPPacket(false),
- mSeekable(false) {
+ mSeekable(false),
+ mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs),
+ mKeepAliveGeneration(0) {
mNetLooper->setName("rtsp net");
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
@@ -145,12 +161,9 @@ struct MyHandler : public AHandler {
mSessionHost = host;
}
- void connect(const sp<AMessage> &doneMsg) {
- mDoneMsg = doneMsg;
-
- mLooper->registerHandler(this);
- mLooper->registerHandler(mConn);
- (1 ? mNetLooper : mLooper)->registerHandler(mRTPConn);
+ void connect() {
+ looper()->registerHandler(mConn);
+ (1 ? mNetLooper : looper())->registerHandler(mRTPConn);
sp<AMessage> notify = new AMessage('biny', id());
mConn->observeBinaryData(notify);
@@ -159,33 +172,16 @@ struct MyHandler : public AHandler {
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
- void disconnect(const sp<AMessage> &doneMsg) {
- mDoneMsg = doneMsg;
-
+ void disconnect() {
(new AMessage('abor', id()))->post();
}
- void seek(int64_t timeUs, const sp<AMessage> &doneMsg) {
+ void seek(int64_t timeUs) {
sp<AMessage> msg = new AMessage('seek', id());
msg->setInt64("time", timeUs);
- msg->setMessage("doneMsg", doneMsg);
msg->post();
}
- int64_t getNormalPlayTimeUs() {
- int64_t maxTimeUs = 0;
- for (size_t i = 0; i < mTracks.size(); ++i) {
- int64_t timeUs = mTracks.editItemAt(i).mPacketSource
- ->getNormalPlayTimeUs();
-
- if (i == 0 || timeUs > maxTimeUs) {
- maxTimeUs = timeUs;
- }
- }
-
- return maxTimeUs;
- }
-
static void addRR(const sp<ABuffer> &buf) {
uint8_t *ptr = buf->data() + buf->size();
ptr[0] = 0x80 | 0;
@@ -379,6 +375,8 @@ struct MyHandler : public AHandler {
case 'disc':
{
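+ // Bump the generation so keep-alives scheduled for the old connection are ignored.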
+ ++mKeepAliveGeneration;
+
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
sp<AMessage> reply = new AMessage('conn', id());
@@ -465,8 +463,17 @@ struct MyHandler : public AHandler {
mBaseURL = tmp;
}
- CHECK_GT(mSessionDesc->countTracks(), 1u);
- setupTrack(1);
+ if (mSessionDesc->countTracks() < 2) {
+ // There are no actual tracks in this session.
+ // The first "track" is merely session metadata.
+
+ LOGW("Session doesn't contain any playable "
+ "tracks. Aborting.");
+ result = ERROR_UNSUPPORTED;
+ } else {
+ setupTrack(1);
+ }
}
}
}
@@ -510,6 +517,34 @@ struct MyHandler : public AHandler {
CHECK_GE(i, 0);
mSessionID = response->mHeaders.valueAt(i);
+
+ mKeepAliveTimeoutUs = kDefaultKeepAliveTimeoutUs;
+ AString timeoutStr;
+ if (GetAttribute(
+ mSessionID.c_str(), "timeout", &timeoutStr)) {
+ char *end;
+ unsigned long timeoutSecs =
+ strtoul(timeoutStr.c_str(), &end, 10);
+
+ if (end == timeoutStr.c_str() || *end != '\0') {
+ LOGW("server specified malformed timeout '%s'",
+ timeoutStr.c_str());
+
+ mKeepAliveTimeoutUs = kDefaultKeepAliveTimeoutUs;
+ } else if (timeoutSecs < 15) {
+ LOGW("server specified too short a timeout "
+ "(%lu secs), using default.",
+ timeoutSecs);
+
+ mKeepAliveTimeoutUs = kDefaultKeepAliveTimeoutUs;
+ } else {
+ mKeepAliveTimeoutUs = timeoutSecs * 1000000ll;
+
+ LOGI("server specified timeout of %lu secs.",
+ timeoutSecs);
+ }
+ }
+
i = mSessionID.find(";");
if (i >= 0) {
// Remove options, i.e. ";timeout=90"
@@ -563,6 +598,9 @@ struct MyHandler : public AHandler {
if (index < mSessionDesc->countTracks()) {
setupTrack(index);
} else if (mSetupTracksSuccessful) {
+ ++mKeepAliveGeneration;
+ postKeepAlive();
+
AString request = "PLAY ";
request.append(mSessionURL);
request.append(" RTSP/1.0\r\n");
@@ -614,12 +652,59 @@ struct MyHandler : public AHandler {
break;
}
+ case 'aliv':
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mKeepAliveGeneration) {
+ // obsolete event.
+ break;
+ }
+
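+ // Send a lightweight OPTIONS request carrying the session ID as the keep-alive ping.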
+ AString request;
+ request.append("OPTIONS ");
+ request.append(mSessionURL);
+ request.append(" RTSP/1.0\r\n");
+ request.append("Session: ");
+ request.append(mSessionID);
+ request.append("\r\n");
+ request.append("\r\n");
+
+ sp<AMessage> reply = new AMessage('opts', id());
+ reply->setInt32("generation", mKeepAliveGeneration);
+ mConn->sendRequest(request.c_str(), reply);
+ break;
+ }
+
+ case 'opts':
+ {
+ int32_t result;
+ CHECK(msg->findInt32("result", &result));
+
+ LOGI("OPTIONS completed with result %d (%s)",
+ result, strerror(-result));
+
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mKeepAliveGeneration) {
+ // obsolete event.
+ break;
+ }
+
+ postKeepAlive();
+ break;
+ }
+
case 'abor':
{
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
- info->mPacketSource->signalEOS(ERROR_END_OF_STREAM);
+ if (!mFirstAccessUnit) {
+ postQueueEOS(i, ERROR_END_OF_STREAM);
+ }
if (!info->mUsingInterleavedTCP) {
mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
@@ -690,11 +775,10 @@ struct MyHandler : public AHandler {
case 'quit':
{
- if (mDoneMsg != NULL) {
- mDoneMsg->setInt32("result", UNKNOWN_ERROR);
- mDoneMsg->post();
- mDoneMsg = NULL;
- }
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatDisconnected);
+ msg->setInt32("result", UNKNOWN_ERROR);
+ msg->post();
break;
}
@@ -708,9 +792,13 @@ struct MyHandler : public AHandler {
}
if (mNumAccessUnitsReceived == 0) {
+#if 1
LOGI("stream ended? aborting.");
(new AMessage('abor', id()))->post();
break;
+#else
+ LOGI("haven't seen an AU in a looong time.");
+#endif
}
mNumAccessUnitsReceived = 0;
@@ -795,17 +883,12 @@ struct MyHandler : public AHandler {
case 'seek':
{
- sp<AMessage> doneMsg;
- CHECK(msg->findMessage("doneMsg", &doneMsg));
-
- if (mSeekPending) {
- doneMsg->post();
- break;
- }
-
if (!mSeekable) {
LOGW("This is a live stream, ignoring seek request.");
- doneMsg->post();
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatSeekDone);
+ msg->post();
break;
}
@@ -831,7 +914,6 @@ struct MyHandler : public AHandler {
sp<AMessage> reply = new AMessage('see1', id());
reply->setInt64("time", timeUs);
- reply->setMessage("doneMsg", doneMsg);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -842,7 +924,8 @@ struct MyHandler : public AHandler {
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
- info->mPacketSource->flushQueue();
+ postQueueSeekDiscontinuity(i);
+
info->mRTPAnchor = 0;
info->mNTPAnchorUs = -1;
}
@@ -866,11 +949,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
- sp<AMessage> doneMsg;
- CHECK(msg->findMessage("doneMsg", &doneMsg));
-
sp<AMessage> reply = new AMessage('see2', id());
- reply->setMessage("doneMsg", doneMsg);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -915,10 +994,9 @@ struct MyHandler : public AHandler {
mSeekPending = false;
- sp<AMessage> doneMsg;
- CHECK(msg->findMessage("doneMsg", &doneMsg));
-
- doneMsg->post();
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatSeekDone);
+ msg->post();
break;
}
@@ -969,6 +1047,12 @@ struct MyHandler : public AHandler {
}
}
+ void postKeepAlive() {
+ sp<AMessage> msg = new AMessage('aliv', id());
+ msg->setInt32("generation", mKeepAliveGeneration);
+ msg->post((mKeepAliveTimeoutUs * 9) / 10);
+ }
+
void postAccessUnitTimeoutCheck() {
if (mCheckPending) {
return;
@@ -1056,8 +1140,14 @@ struct MyHandler : public AHandler {
LOGV("track #%d: rtpTime=%u <=> npt=%.2f", n, rtpTime, npt1);
- info->mPacketSource->setNormalPlayTimeMapping(
- rtpTime, (int64_t)(npt1 * 1E6));
+ info->mNormalPlayTimeRTP = rtpTime;
+ info->mNormalPlayTimeUs = (int64_t)(npt1 * 1E6);
+
+ if (!mFirstAccessUnit) {
+ postNormalPlayTimeMapping(
+ trackIndex,
+ info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
+ }
++n;
}
@@ -1065,11 +1155,15 @@ struct MyHandler : public AHandler {
mSeekable = true;
}
- sp<APacketSource> getPacketSource(size_t index) {
+ sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale) {
CHECK_GE(index, 0u);
CHECK_LT(index, mTracks.size());
- return mTracks.editItemAt(index).mPacketSource;
+ const TrackInfo &info = mTracks.itemAt(index);
+
+ *timeScale = info.mTimeScale;
+
+ return info.mPacketSource->getFormat();
}
size_t countTracks() const {
@@ -1089,6 +1183,9 @@ private:
int64_t mNTPAnchorUs;
int32_t mTimeScale;
+ uint32_t mNormalPlayTimeRTP;
+ int64_t mNormalPlayTimeUs;
+
sp<APacketSource> mPacketSource;
// Stores packets temporarily while no notion of time
@@ -1096,9 +1193,9 @@ private:
List<sp<ABuffer> > mPackets;
};
+ sp<AMessage> mNotify;
bool mUIDValid;
uid_t mUID;
- sp<ALooper> mLooper;
sp<ALooper> mNetLooper;
sp<ARTSPConnection> mConn;
sp<ARTPConnection> mRTPConn;
@@ -1124,11 +1221,11 @@ private:
bool mReceivedFirstRTCPPacket;
bool mReceivedFirstRTPPacket;
bool mSeekable;
+ int64_t mKeepAliveTimeoutUs;
+ int32_t mKeepAliveGeneration;
Vector<TrackInfo> mTracks;
- sp<AMessage> mDoneMsg;
-
void setupTrack(size_t index) {
sp<APacketSource> source =
new APacketSource(mSessionDesc, index);
@@ -1158,6 +1255,8 @@ private:
info->mNewSegment = true;
info->mRTPAnchor = 0;
info->mNTPAnchorUs = -1;
+ info->mNormalPlayTimeRTP = 0;
+ info->mNormalPlayTimeUs = 0ll;
unsigned long PT;
AString formatDesc;
@@ -1283,9 +1382,17 @@ private:
LOGV("onAccessUnitComplete track %d", trackIndex);
if (mFirstAccessUnit) {
- mDoneMsg->setInt32("result", OK);
- mDoneMsg->post();
- mDoneMsg = NULL;
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatConnected);
+ msg->post();
+
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ TrackInfo *info = &mTracks.editItemAt(i);
+
+ postNormalPlayTimeMapping(
+ i,
+ info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
+ }
mFirstAccessUnit = false;
}
@@ -1303,12 +1410,12 @@ private:
track->mPackets.erase(track->mPackets.begin());
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
- track->mPacketSource->queueAccessUnit(accessUnit);
+ postQueueAccessUnit(trackIndex, accessUnit);
}
}
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
- track->mPacketSource->queueAccessUnit(accessUnit);
+ postQueueAccessUnit(trackIndex, accessUnit);
}
}
@@ -1344,6 +1451,39 @@ private:
return true;
}
+ void postQueueAccessUnit(
+ size_t trackIndex, const sp<ABuffer> &accessUnit) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatAccessUnit);
+ msg->setSize("trackIndex", trackIndex);
+ msg->setObject("accessUnit", accessUnit);
+ msg->post();
+ }
+
+ void postQueueEOS(size_t trackIndex, status_t finalResult) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatEOS);
+ msg->setSize("trackIndex", trackIndex);
+ msg->setInt32("finalResult", finalResult);
+ msg->post();
+ }
+
+ void postQueueSeekDiscontinuity(size_t trackIndex) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatSeekDiscontinuity);
+ msg->setSize("trackIndex", trackIndex);
+ msg->post();
+ }
+
+ void postNormalPlayTimeMapping(
+ size_t trackIndex, uint32_t rtpTime, int64_t nptUs) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatNormalPlayTimeMapping);
+ msg->setSize("trackIndex", trackIndex);
+ msg->setInt32("rtpTime", rtpTime);
+ msg->setInt64("nptUs", nptUs);
+ msg->post();
+ }
DISALLOW_EVIL_CONSTRUCTORS(MyHandler);
};
diff --git a/media/tests/MediaFrameworkTest/Android.mk b/media/tests/MediaFrameworkTest/Android.mk
index 9c45e6e..c9afa19 100644
--- a/media/tests/MediaFrameworkTest/Android.mk
+++ b/media/tests/MediaFrameworkTest/Android.mk
@@ -7,6 +7,8 @@ LOCAL_SRC_FILES := $(call all-subdir-java-files)
LOCAL_JAVA_LIBRARIES := android.test.runner
+LOCAL_STATIC_JAVA_LIBRARIES := easymocklib
+
LOCAL_PACKAGE_NAME := mediaframeworktest
include $(BUILD_PACKAGE)
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java
index a203adc..62af3f3 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java
@@ -47,6 +47,7 @@ public class MediaFrameworkUnitTestRunner extends InstrumentationTestRunner {
addMediaMetadataRetrieverStateUnitTests(suite);
addMediaRecorderStateUnitTests(suite);
addMediaPlayerStateUnitTests(suite);
+ addMediaScannerUnitTests(suite);
return suite;
}
@@ -89,4 +90,8 @@ public class MediaFrameworkUnitTestRunner extends InstrumentationTestRunner {
suite.addTestSuite(MediaPlayerSetVolumeStateUnitTest.class);
suite.addTestSuite(MediaPlayerMetadataParserTest.class);
}
+
+ private void addMediaScannerUnitTests(TestSuite suite) {
+ suite.addTestSuite(MediaInserterTest.class);
+ }
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaBassBoostTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaBassBoostTest.java
index e3aa8cf..1fa5c0d 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaBassBoostTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaBassBoostTest.java
@@ -44,13 +44,7 @@ import java.util.UUID;
*/
public class MediaBassBoostTest extends ActivityInstrumentationTestCase2<MediaFrameworkTest> {
private String TAG = "MediaBassBoostTest";
- private final static int MIN_ENERGY_RATIO_2 = 3;
private final static short TEST_STRENGTH = 500;
- private final static int TEST_VOLUME = 4;
- // Implementor UUID for volume controller effect defined in
- // frameworks/base/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
- private final static UUID VOLUME_EFFECT_UUID =
- UUID.fromString("119341a0-8469-11df-81f9-0002a5d5c51b");
private BassBoost mBassBoost = null;
private int mSession = -1;
@@ -184,85 +178,6 @@ public class MediaBassBoostTest extends ActivityInstrumentationTestCase2<MediaFr
}
//-----------------------------------------------------------------
- // 2 - Effect action
- //----------------------------------
-
- //Test case 2.0: test actual bass boost influence on sound
- @LargeTest
- public void test2_0SoundModification() throws Exception {
- boolean result = false;
- String msg = "test2_0SoundModification()";
- EnergyProbe probe = null;
- AudioEffect vc = null;
- MediaPlayer mp = null;
- AudioManager am = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
- int volume = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
- am.setStreamVolume(AudioManager.STREAM_MUSIC,
- TEST_VOLUME,
- 0);
-
- try {
- probe = new EnergyProbe(0);
- // creating a volume controller on output mix ensures that ro.audio.silent mutes
- // audio after the effects and not before
- vc = new AudioEffect(
- AudioEffect.EFFECT_TYPE_NULL,
- VOLUME_EFFECT_UUID,
- 0,
- 0);
- vc.setEnabled(true);
-
- mp = new MediaPlayer();
- mp.setDataSource(MediaNames.SINE_200_1000);
- mp.setLooping(true);
- mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
- getBassBoost(mp.getAudioSessionId());
- mp.prepare();
- mp.start();
- Thread.sleep(200);
- // measure reference energy around 1kHz
- int refEnergy200 = probe.capture(200);
- int refEnergy1000 = probe.capture(1000);
- mBassBoost.setStrength((short)1000);
- mBassBoost.setEnabled(true);
- Thread.sleep(4000);
- // measure energy around 1kHz with band level at min
- int energy200 = probe.capture(200);
- int energy1000 = probe.capture(1000);
- // verify that the energy ration between low and high frequencies is at least
- // MIN_ENERGY_RATIO_2 times higher with bassboost on.
- assertTrue(msg + ": bass boost has no effect",
- ((float)energy200/(float)energy1000) >
- (MIN_ENERGY_RATIO_2 * ((float)refEnergy200/(float)refEnergy1000)));
- result = true;
- } catch (IllegalArgumentException e) {
- msg = msg.concat(": Bad parameter value");
- loge(msg, "Bad parameter value");
- } catch (UnsupportedOperationException e) {
- msg = msg.concat(": get parameter() rejected");
- loge(msg, "get parameter() rejected");
- } catch (IllegalStateException e) {
- msg = msg.concat("get parameter() called in wrong state");
- loge(msg, "get parameter() called in wrong state");
- } catch (InterruptedException e) {
- loge(msg, "sleep() interrupted");
- }
- finally {
- releaseBassBoost();
- if (mp != null) {
- mp.release();
- }
- if (vc != null) {
- vc.release();
- }
- if (probe != null) {
- probe.release();
- }
- am.setStreamVolume(AudioManager.STREAM_MUSIC, volume, 0);
- }
- assertTrue(msg, result);
- }
- //-----------------------------------------------------------------
// private methods
//----------------------------------
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaEqualizerTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaEqualizerTest.java
index ee91bbb..da9089d 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaEqualizerTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaEqualizerTest.java
@@ -49,11 +49,6 @@ public class MediaEqualizerTest extends ActivityInstrumentationTestCase2<MediaFr
private final static int MAX_BAND_LEVEL = 1500;
private final static int TEST_FREQUENCY_MILLIHERTZ = 1000000;
private final static int MIN_NUMBER_OF_PRESETS = 4;
- private final static int TEST_VOLUME = 4;
- // Implementor UUID for volume controller effect defined in
- // frameworks/base/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
- private final static UUID VOLUME_EFFECT_UUID =
- UUID.fromString("119341a0-8469-11df-81f9-0002a5d5c51b");
private Equalizer mEqualizer = null;
private int mSession = -1;
@@ -252,80 +247,6 @@ public class MediaEqualizerTest extends ActivityInstrumentationTestCase2<MediaFr
}
//-----------------------------------------------------------------
- // 2 - Effect action
- //----------------------------------
-
- //Test case 2.0: test that the equalizer actually alters the sound
- @LargeTest
- public void test2_0SoundModification() throws Exception {
- boolean result = false;
- String msg = "test2_0SoundModification()";
- EnergyProbe probe = null;
- AudioEffect vc = null;
- MediaPlayer mp = null;
- AudioManager am = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
- int volume = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
- am.setStreamVolume(AudioManager.STREAM_MUSIC,
- TEST_VOLUME,
- 0);
- try {
- probe = new EnergyProbe(0);
- // creating a volume controller on output mix ensures that ro.audio.silent mutes
- // audio after the effects and not before
- vc = new AudioEffect(
- AudioEffect.EFFECT_TYPE_NULL,
- VOLUME_EFFECT_UUID,
- 0,
- 0);
- vc.setEnabled(true);
-
- mp = new MediaPlayer();
- mp.setDataSource(MediaNames.SINE_200_1000);
- mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
- getEqualizer(mp.getAudioSessionId());
- mp.prepare();
- mp.start();
- Thread.sleep(500);
- // measure reference energy around 1kHz
- int refEnergy = probe.capture(1000);
- short band = mEqualizer.getBand(1000000);
- short[] levelRange = mEqualizer.getBandLevelRange();
- mEqualizer.setBandLevel(band, levelRange[0]);
- mEqualizer.setEnabled(true);
- Thread.sleep(500);
- // measure energy around 1kHz with band level at min
- int energy = probe.capture(1000);
- assertTrue(msg + ": equalizer has no effect at 1kHz", energy < refEnergy/4);
- result = true;
- } catch (IllegalArgumentException e) {
- msg = msg.concat(": Bad parameter value");
- loge(msg, "Bad parameter value");
- } catch (UnsupportedOperationException e) {
- msg = msg.concat(": get parameter() rejected");
- loge(msg, "get parameter() rejected");
- } catch (IllegalStateException e) {
- msg = msg.concat("get parameter() called in wrong state");
- loge(msg, "get parameter() called in wrong state");
- } catch (InterruptedException e) {
- loge(msg, "sleep() interrupted");
- }
- finally {
- releaseEqualizer();
- if (mp != null) {
- mp.release();
- }
- if (vc != null) {
- vc.release();
- }
- if (probe != null) {
- probe.release();
- }
- am.setStreamVolume(AudioManager.STREAM_MUSIC, volume, 0);
- }
- assertTrue(msg, result);
- }
-
- //-----------------------------------------------------------------
// private methods
//----------------------------------
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaVirtualizerTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaVirtualizerTest.java
index b74e525..122545f 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaVirtualizerTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/MediaVirtualizerTest.java
@@ -44,13 +44,7 @@ import java.util.UUID;
*/
public class MediaVirtualizerTest extends ActivityInstrumentationTestCase2<MediaFrameworkTest> {
private String TAG = "MediaVirtualizerTest";
- private final static int MIN_ENERGY_RATIO_2 = 2;
private final static short TEST_STRENGTH = 500;
- private final static int TEST_VOLUME = 4;
- // Implementor UUID for volume controller effect defined in
- // frameworks/base/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
- private final static UUID VOLUME_EFFECT_UUID =
- UUID.fromString("119341a0-8469-11df-81f9-0002a5d5c51b");
private Virtualizer mVirtualizer = null;
private int mSession = -1;
@@ -185,89 +179,6 @@ public class MediaVirtualizerTest extends ActivityInstrumentationTestCase2<Media
}
//-----------------------------------------------------------------
- // 2 - Effect action
- //----------------------------------
-
- //Test case 2.0: test actual virtualizer influence on sound
- @LargeTest
- public void test2_0SoundModification() throws Exception {
- boolean result = false;
- String msg = "test2_0SoundModification()";
- EnergyProbe probe = null;
- AudioEffect vc = null;
- MediaPlayer mp = null;
- AudioManager am = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
- int volume = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
- am.setStreamVolume(AudioManager.STREAM_MUSIC,
- TEST_VOLUME,
- 0);
-
- try {
- probe = new EnergyProbe(0);
- // creating a volume controller on output mix ensures that ro.audio.silent mutes
- // audio after the effects and not before
- vc = new AudioEffect(
- AudioEffect.EFFECT_TYPE_NULL,
- VOLUME_EFFECT_UUID,
- 0,
- 0);
- vc.setEnabled(true);
-
- mp = new MediaPlayer();
- mp.setDataSource(MediaNames.SINE_200_1000);
- mp.setLooping(true);
- mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
- getVirtualizer(mp.getAudioSessionId());
- mp.prepare();
- mp.start();
- Thread.sleep(200);
- // measure reference energy around 1kHz
- int refEnergy200 = probe.capture(200);
- int refEnergy1000 = probe.capture(1000);
- mVirtualizer.setStrength((short)1000);
- mVirtualizer.setEnabled(true);
- Thread.sleep(4000);
- // measure energy around 1kHz with band level at min
- int energy200 = probe.capture(200);
- int energy1000 = probe.capture(1000);
- // verify that the energy ration between low and high frequencies is at least
- // MIN_ENERGY_RATIO_2 times higher with virtualizer on.
- // NOTE: this is what is observed with current virtualizer implementation and the test
- // audio file but is not the primary effect of the virtualizer. A better way would
- // be to have a stereo PCM capture and check that a strongly paned input is centered
- // when output. However, we cannot capture stereo with the visualizer.
- assertTrue(msg + ": virtualizer has no effect",
- ((float)energy200/(float)energy1000) >
- (MIN_ENERGY_RATIO_2 * ((float)refEnergy200/(float)refEnergy1000)));
- result = true;
- } catch (IllegalArgumentException e) {
- msg = msg.concat(": Bad parameter value");
- loge(msg, "Bad parameter value");
- } catch (UnsupportedOperationException e) {
- msg = msg.concat(": get parameter() rejected");
- loge(msg, "get parameter() rejected");
- } catch (IllegalStateException e) {
- msg = msg.concat("get parameter() called in wrong state");
- loge(msg, "get parameter() called in wrong state");
- } catch (InterruptedException e) {
- loge(msg, "sleep() interrupted");
- }
- finally {
- releaseVirtualizer();
- if (mp != null) {
- mp.release();
- }
- if (vc != null) {
- vc.release();
- }
- if (probe != null) {
- probe.release();
- }
- am.setStreamVolume(AudioManager.STREAM_MUSIC, volume, 0);
- }
- assertTrue(msg, result);
- }
- //-----------------------------------------------------------------
// private methods
//----------------------------------
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaInserterTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaInserterTest.java
new file mode 100644
index 0000000..ad3c342
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaInserterTest.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.unit;
+
+import android.content.ContentValues;
+import android.content.IContentProvider;
+import android.media.MediaInserter;
+import android.net.Uri;
+import android.provider.MediaStore.Audio;
+import android.provider.MediaStore.Files;
+import android.provider.MediaStore.Images;
+import android.provider.MediaStore.Video;
+import android.test.InstrumentationTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import dalvik.annotation.TestTargetClass;
+
+import org.easymock.EasyMock;
+import org.easymock.IArgumentMatcher;
+
+@TestTargetClass(MediaInserter.class)
+public class MediaInserterTest extends InstrumentationTestCase {
+
+ private MediaInserter mMediaInserter;
+ private static final int TEST_BUFFER_SIZE = 10;
+ private IContentProvider mMockProvider;
+
+ private int mFilesCounter;
+ private int mAudioCounter;
+ private int mVideoCounter;
+ private int mImagesCounter;
+
+ private static final String sVolumeName = "external";
+ private static final Uri sAudioUri = Audio.Media.getContentUri(sVolumeName);
+ private static final Uri sVideoUri = Video.Media.getContentUri(sVolumeName);
+ private static final Uri sImagesUri = Images.Media.getContentUri(sVolumeName);
+ private static final Uri sFilesUri = Files.getContentUri(sVolumeName);
+
+ private static class MediaUriMatcher implements IArgumentMatcher {
+ private Uri mUri;
+
+ private MediaUriMatcher(Uri uri) {
+ mUri = uri;
+ }
+
+ @Override
+ public boolean matches(Object argument) {
+ if (!(argument instanceof Uri)) {
+ return false;
+ }
+
+ Uri actualUri = (Uri) argument;
+ return actualUri == mUri;
+ }
+
+ @Override
+ public void appendTo(StringBuffer buffer) {
+ buffer.append("expected a TableUri '").append(mUri).append("'");
+ }
+
+ private static Uri expectMediaUri(Uri in) {
+ EasyMock.reportMatcher(new MediaUriMatcher(in));
+ return null;
+ }
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ mMockProvider = EasyMock.createMock(IContentProvider.class);
+ mMediaInserter = new MediaInserter(mMockProvider, TEST_BUFFER_SIZE);
+ mFilesCounter = 0;
+ mAudioCounter = 0;
+ mVideoCounter = 0;
+ mImagesCounter = 0;
+ }
+
+ private ContentValues createFileContent() {
+ ContentValues values = new ContentValues();
+ values.put("_data", "/mnt/sdcard/file" + ++mFilesCounter);
+ return values;
+ }
+
+ private ContentValues createAudioContent() {
+ ContentValues values = new ContentValues();
+ values.put("_data", "/mnt/sdcard/audio" + ++mAudioCounter);
+ return values;
+ }
+
+ private ContentValues createVideoContent() {
+ ContentValues values = new ContentValues();
+ values.put("_data", "/mnt/sdcard/video" + ++mVideoCounter);
+ return values;
+ }
+
+ private ContentValues createImageContent() {
+ ContentValues values = new ContentValues();
+ values.put("_data", "/mnt/sdcard/image" + ++mImagesCounter);
+ return values;
+ }
+
+ private ContentValues createContent(Uri uri) {
+ if (uri == sFilesUri) return createFileContent();
+ else if (uri == sAudioUri) return createAudioContent();
+ else if (uri == sVideoUri) return createVideoContent();
+ else if (uri == sImagesUri) return createImageContent();
+ else throw new IllegalArgumentException("Unknown URL: " + uri.toString());
+ }
+
+ private void fillBuffer(Uri uri, int numberOfFiles) throws Exception {
+ ContentValues values;
+ for (int i = 0; i < numberOfFiles; ++i) {
+ values = createContent(uri);
+ mMediaInserter.insert(uri, values);
+ }
+ }
+
+ @SmallTest
+ public void testInsertContentsLessThanBufferSize() throws Exception {
+ EasyMock.replay(mMockProvider);
+
+ fillBuffer(sFilesUri, TEST_BUFFER_SIZE - 4);
+ fillBuffer(sAudioUri, TEST_BUFFER_SIZE - 3);
+ fillBuffer(sVideoUri, TEST_BUFFER_SIZE - 2);
+ fillBuffer(sImagesUri, TEST_BUFFER_SIZE - 1);
+
+ EasyMock.verify(mMockProvider);
+ }
+
+ @SmallTest
+ public void testInsertContentsEqualToBufferSize() throws Exception {
+ EasyMock.expect(mMockProvider.bulkInsert(
+ (Uri) EasyMock.anyObject(), (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(4);
+ EasyMock.replay(mMockProvider);
+
+ fillBuffer(sFilesUri, TEST_BUFFER_SIZE);
+ fillBuffer(sAudioUri, TEST_BUFFER_SIZE);
+ fillBuffer(sVideoUri, TEST_BUFFER_SIZE);
+ fillBuffer(sImagesUri, TEST_BUFFER_SIZE);
+
+ EasyMock.verify(mMockProvider);
+ }
+
+ @SmallTest
+ public void testInsertContentsMoreThanBufferSize() throws Exception {
+ EasyMock.expect(mMockProvider.bulkInsert(
+ (Uri) EasyMock.anyObject(), (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(4);
+ EasyMock.replay(mMockProvider);
+
+ fillBuffer(sFilesUri, TEST_BUFFER_SIZE + 1);
+ fillBuffer(sAudioUri, TEST_BUFFER_SIZE + 2);
+ fillBuffer(sVideoUri, TEST_BUFFER_SIZE + 3);
+ fillBuffer(sImagesUri, TEST_BUFFER_SIZE + 4);
+
+ EasyMock.verify(mMockProvider);
+ }
+
+ @SmallTest
+ public void testFlushAllWithEmptyContents() throws Exception {
+ EasyMock.replay(mMockProvider);
+
+ mMediaInserter.flushAll();
+
+ EasyMock.verify(mMockProvider);
+ }
+
+ @SmallTest
+ public void testFlushAllWithSomeContents() throws Exception {
+ EasyMock.expect(mMockProvider.bulkInsert(
+ (Uri) EasyMock.anyObject(), (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(4);
+ EasyMock.replay(mMockProvider);
+
+ fillBuffer(sFilesUri, TEST_BUFFER_SIZE - 4);
+ fillBuffer(sAudioUri, TEST_BUFFER_SIZE - 3);
+ fillBuffer(sVideoUri, TEST_BUFFER_SIZE - 2);
+ fillBuffer(sImagesUri, TEST_BUFFER_SIZE - 1);
+ mMediaInserter.flushAll();
+
+ EasyMock.verify(mMockProvider);
+ }
+
+ @SmallTest
+ public void testInsertContentsAfterFlushAll() throws Exception {
+ EasyMock.expect(mMockProvider.bulkInsert(
+ (Uri) EasyMock.anyObject(), (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(8);
+ EasyMock.replay(mMockProvider);
+
+ fillBuffer(sFilesUri, TEST_BUFFER_SIZE - 4);
+ fillBuffer(sAudioUri, TEST_BUFFER_SIZE - 3);
+ fillBuffer(sVideoUri, TEST_BUFFER_SIZE - 2);
+ fillBuffer(sImagesUri, TEST_BUFFER_SIZE - 1);
+ mMediaInserter.flushAll();
+
+ fillBuffer(sFilesUri, TEST_BUFFER_SIZE + 1);
+ fillBuffer(sAudioUri, TEST_BUFFER_SIZE + 2);
+ fillBuffer(sVideoUri, TEST_BUFFER_SIZE + 3);
+ fillBuffer(sImagesUri, TEST_BUFFER_SIZE + 4);
+
+ EasyMock.verify(mMockProvider);
+ }
+
+ @SmallTest
+ public void testInsertContentsWithDifferentSizePerContentType() throws Exception {
+ EasyMock.expect(mMockProvider.bulkInsert(MediaUriMatcher.expectMediaUri(sFilesUri),
+ (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(1);
+ EasyMock.expect(mMockProvider.bulkInsert(MediaUriMatcher.expectMediaUri(sAudioUri),
+ (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(2);
+ EasyMock.expect(mMockProvider.bulkInsert(MediaUriMatcher.expectMediaUri(sVideoUri),
+ (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(3);
+ EasyMock.expect(mMockProvider.bulkInsert(MediaUriMatcher.expectMediaUri(sImagesUri),
+ (ContentValues[]) EasyMock.anyObject())).andReturn(1);
+ EasyMock.expectLastCall().times(4);
+ EasyMock.replay(mMockProvider);
+
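+ // Each URI fills its buffer at a different rate, so the expected number of bulkInsert calls differs per table.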
+ for (int i = 0; i < TEST_BUFFER_SIZE; ++i) {
+ fillBuffer(sFilesUri, 1);
+ fillBuffer(sAudioUri, 2);
+ fillBuffer(sVideoUri, 3);
+ fillBuffer(sImagesUri, 4);
+ }
+
+ EasyMock.verify(mMockProvider);
+ }
+}
diff --git a/media/tests/players/invoke_mock_media_player.cpp b/media/tests/players/invoke_mock_media_player.cpp
index ed3051b..a6fdeea 100644
--- a/media/tests/players/invoke_mock_media_player.cpp
+++ b/media/tests/players/invoke_mock_media_player.cpp
@@ -68,7 +68,6 @@ class Player: public MediaPlayerBase
}
virtual status_t setDataSource(int fd, int64_t offset, int64_t length) {return OK;}
- virtual status_t setVideoSurface(const sp<Surface>& surface) {return OK;}
virtual status_t setVideoSurfaceTexture(
const sp<ISurfaceTexture>& surfaceTexture) {return OK;}
virtual status_t prepare() {return OK;}