Diffstat (limited to 'media')
 -rw-r--r--  media/java/android/media/AudioTrack.java       48
 -rw-r--r--  media/java/android/media/MediaScanner.java      4
 -rw-r--r--  media/libstagefright/AwesomePlayer.cpp          19
 -rwxr-xr-x  media/libstagefright/OMXCodec.cpp                3
 -rw-r--r--  media/libstagefright/include/AwesomePlayer.h     2
 5 files changed, 48 insertions, 28 deletions
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index b97c3c4..b20a6e9 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -32,24 +32,25 @@ import android.util.Log;
* It allows to stream PCM audio buffers to the audio hardware for playback. This is
* achieved by "pushing" the data to the AudioTrack object using one of the
* {@link #write(byte[], int, int)} and {@link #write(short[], int, int)} methods.
- *
+ *
* <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
* In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using
- * one of the write() methods. These are blocking and return when the data has been transferred
- * from the Java layer to the native layer and queued for playback. The streaming mode
- * is most useful when playing blocks of audio data that for instance are:
+ * one of the {@code write()} methods. These are blocking and return when the data has been
+ * transferred from the Java layer to the native layer and queued for playback. The streaming
+ * mode is most useful when playing blocks of audio data that for instance are:
+ *
* <ul>
* <li>too big to fit in memory because of the duration of the sound to play,</li>
* <li>too big to fit in memory because of the characteristics of the audio data
* (high sampling rate, bits per sample ...)</li>
* <li>received or generated while previously queued audio is playing.</li>
* </ul>
+ *
* The static mode is to be chosen when dealing with short sounds that fit in memory and
- * that need to be played with the smallest latency possible. AudioTrack instances in static mode
- * can play the sound without the need to transfer the audio data from Java to native layer
- * each time the sound is to be played. The static mode will therefore be preferred for UI and
- * game sounds that are played often, and with the smallest overhead possible.
- *
+ * that need to be played with the smallest latency possible. The static mode will
+ * therefore be preferred for UI and game sounds that are played often, and with the
+ * smallest overhead possible.
+ *
* <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
* The size of this buffer, specified during the construction, determines how long an AudioTrack
* can play before running out of data.<br>
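The reworded class documentation above draws the line between streaming and static mode. As a rough illustration of the streaming case only, here is a minimal sketch (plain Android SDK usage, not part of this change) that synthesizes a sine tone and pushes it through blocking write() calls; the ToneStreamer class and its parameters are made up for illustration.

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    public class ToneStreamer {
        // Streams a 440 Hz sine tone in MODE_STREAM; each write() blocks until
        // the shorts have been handed to the native layer, as described above.
        public static void playTone(int durationSeconds) {
            final int sampleRate = 44100;
            final int minBuf = AudioTrack.getMinBufferSize(
                    sampleRate,
                    AudioFormat.CHANNEL_OUT_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);

            AudioTrack track = new AudioTrack(
                    AudioManager.STREAM_MUSIC,
                    sampleRate,
                    AudioFormat.CHANNEL_OUT_MONO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    minBuf,
                    AudioTrack.MODE_STREAM);

            track.play();

            short[] chunk = new short[minBuf / 2];
            int totalSamples = sampleRate * durationSeconds;
            for (int written = 0; written < totalSamples; ) {
                for (int i = 0; i < chunk.length; i++) {
                    chunk[i] = (short) (Math.sin(2 * Math.PI * 440 * (written + i) / sampleRate)
                            * Short.MAX_VALUE);
                }
                track.write(chunk, 0, chunk.length);  // blocks while the mixer drains
                written += chunk.length;
            }

            track.stop();
            track.release();
        }
    }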
@@ -816,6 +817,7 @@ public class AudioTrack
//--------------------
/**
* Starts playing an AudioTrack.
+ *
* @throws IllegalStateException
*/
public void play()
@@ -832,6 +834,7 @@ public class AudioTrack
/**
* Stops playing the audio data.
+ *
* @throws IllegalStateException
*/
public void stop()
@@ -848,7 +851,10 @@ public class AudioTrack
}
/**
- * Pauses the playback of the audio data.
+ * Pauses the playback of the audio data. Data that has not been played
+ * back will not be discarded. Subsequent calls to {@link #play} will play
+ * this data back.
+ *
* @throws IllegalStateException
*/
public void pause()
@@ -871,9 +877,9 @@ public class AudioTrack
//--------------------
/**
- * Flushes the audio data currently queued for playback.
+ * Flushes the audio data currently queued for playback. Any data that has
+ * not been played back will be discarded.
*/
-
public void flush() {
if (mState == STATE_INITIALIZED) {
// flush the data in native layer
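The updated Javadoc for pause() and flush() spells out that paused data is retained until it is either resumed with play() or discarded with flush(). A small sketch of that contract, assuming an already-initialized streaming AudioTrack; the PlaybackControls helper is hypothetical.

    import android.media.AudioTrack;

    class PlaybackControls {
        // pause() keeps queued-but-un-played data; a later play() resumes it.
        static void pauseAndResume(AudioTrack track) {
            track.pause();
            // ... later ...
            track.play();   // continues with the data still queued in the buffer
        }

        // flush() discards whatever pause() left queued, so a later play()
        // only renders data written after the flush.
        static void pauseAndDiscard(AudioTrack track) {
            track.pause();
            track.flush();
            track.play();
        }
    }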
@@ -883,9 +889,14 @@ public class AudioTrack
}
/**
- * Writes the audio data to the audio hardware for playback.
+ * Writes the audio data to the audio hardware for playback. Will block until
+ * all data has been written to the audio mixer.
+ * Note that the actual playback of this data might occur after this function
+ * returns. This function is thread safe with respect to {@link #stop} calls,
+ * in which case all of the specified data might not be written to the mixer.
+ *
* @param audioData the array that holds the data to play.
- * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
+ * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
* starts.
* @param sizeInBytes the number of bytes to read in audioData after the offset.
* @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
@@ -914,7 +925,12 @@ public class AudioTrack
/**
- * Writes the audio data to the audio hardware for playback.
+ * Writes the audio data to the audio hardware for playback. Will block until
+ * all data has been written to the audio mixer.
+ * Note that the actual playback of this data might occur after this function
+ * returns. This function is thread safe with respect to {@link #stop} calls,
+ * in which case all of the specified data might not be written to the mixer.
+ *
* @param audioData the array that holds the data to play.
* @param offsetInShorts the offset expressed in shorts in audioData where the data to play
* starts.
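Both write() overloads now document that they block until the data reaches the mixer and that a concurrent stop() may cut a write short. A sketch of how a feeder thread might lean on that behavior, with the hypothetical PcmFeeder class standing in for real decoding code.

    import android.media.AudioTrack;

    class PcmFeeder implements Runnable {
        private final AudioTrack track;
        private final short[] pcm;          // pre-decoded 16-bit samples
        private volatile boolean running = true;

        PcmFeeder(AudioTrack track, short[] pcm) {
            this.track = track;
            this.pcm = pcm;
        }

        @Override
        public void run() {
            int offset = 0;
            while (running && offset < pcm.length) {
                int chunk = Math.min(4096, pcm.length - offset);
                // Blocks until the chunk is handed to the mixer; may return
                // with fewer shorts written if stop() is called from another
                // thread, per the Javadoc above.
                int written = track.write(pcm, offset, chunk);
                if (written < 0) break;     // e.g. ERROR_INVALID_OPERATION
                offset += written;
            }
        }

        // Called from any other thread to end playback.
        void cancel() {
            running = false;
            track.stop();                   // unblocks a write() in progress
        }
    }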
@@ -988,7 +1004,7 @@ public class AudioTrack
/**
* Sets the send level of the audio track to the attached auxiliary effect
- * {@see #attachAuxEffect(int)}. The level value range is 0 to 1.0.
+ * {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
* <p>By default the send level is 0, so even if an effect is attached to the player
* this method must be called for the effect to be applied.
* <p>Note that the passed level value is a raw scalar. UI controls should be scaled
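The corrected cross-reference points at attachAuxEffect(int), which pairs with setAuxEffectSendLevel(). Below is a minimal sketch of attaching an auxiliary PresetReverb and opening the send level, as the doc notes it defaults to 0; the ReverbSend helper name is invented for illustration.

    import android.media.AudioTrack;
    import android.media.audiofx.PresetReverb;

    class ReverbSend {
        // Attaches an auxiliary reverb to 'track' and opens its send level.
        // Without the setAuxEffectSendLevel() call the attached effect stays
        // silent, since the send level defaults to 0.
        static PresetReverb addReverb(AudioTrack track) {
            // Session 0 = output mix, i.e. an auxiliary (send) effect.
            PresetReverb reverb = new PresetReverb(0 /* priority */, 0 /* session */);
            reverb.setPreset(PresetReverb.PRESET_LARGEHALL);
            reverb.setEnabled(true);

            track.attachAuxEffect(reverb.getId());
            track.setAuxEffectSendLevel(1.0f);  // raw linear scalar in [0, 1.0]
            return reverb;
        }
    }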
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index e89be08..8c8569a 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -707,7 +707,9 @@ public class MediaScanner
map.put(MediaStore.MediaColumns.MIME_TYPE, mMimeType);
map.put(MediaStore.MediaColumns.IS_DRM, mIsDrm);
- if (!mNoMedia) {
+ if (mNoMedia) {
+ map.put(MediaStore.MediaColumns.NO_MEDIA, true);
+ } else {
if (MediaFile.isVideoFileType(mFileType)) {
map.put(Video.Media.ARTIST, (mArtist != null && mArtist.length() > 0
? mArtist : MediaStore.UNKNOWN_STRING));
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 77c25d1..0098537 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -453,7 +453,6 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
}
void AwesomePlayer::reset() {
- LOGI("reset");
Mutex::Autolock autoLock(mLock);
reset_l();
}
@@ -467,10 +466,8 @@ void AwesomePlayer::reset_l() {
Playback::STOP, 0);
mDecryptHandle = NULL;
mDrmManagerClient = NULL;
- LOGI("DRM manager client stopped");
}
-
if (mFlags & PLAYING) {
uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
@@ -503,7 +500,6 @@ void AwesomePlayer::reset_l() {
mPreparedCondition.wait(mLock);
}
- LOGI("cancel player events");
cancelPlayerEvents();
mWVMExtractor.clear();
@@ -890,7 +886,11 @@ status_t AwesomePlayer::play_l() {
CHECK(!(mFlags & AUDIO_RUNNING));
if (mVideoSource == NULL) {
- status_t err = startAudioPlayer_l();
+ // We don't want to post an error notification at this point,
+ // the error returned from MediaPlayer::start() will suffice.
+
+ status_t err = startAudioPlayer_l(
+ false /* sendErrorNotification */);
if (err != OK) {
delete mAudioPlayer;
@@ -940,7 +940,7 @@ status_t AwesomePlayer::play_l() {
return OK;
}
-status_t AwesomePlayer::startAudioPlayer_l() {
+status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) {
CHECK(!(mFlags & AUDIO_RUNNING));
if (mAudioSource == NULL || mAudioPlayer == NULL) {
@@ -958,7 +958,10 @@ status_t AwesomePlayer::startAudioPlayer_l() {
true /* sourceAlreadyStarted */);
if (err != OK) {
- notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+ if (sendErrorNotification) {
+ notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+ }
+
return err;
}
@@ -1684,7 +1687,7 @@ void AwesomePlayer::onVideoEvent() {
if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
status_t err = startAudioPlayer_l();
if (err != OK) {
- LOGE("Startung the audio player failed w/ err %d", err);
+ LOGE("Starting the audio player failed w/ err %d", err);
return;
}
}
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index b7b0dc0..5cab60e 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -3539,7 +3539,7 @@ status_t OMXCodec::start(MetaData *meta) {
}
status_t OMXCodec::stop() {
- CODEC_LOGI("stop mState=%d", mState);
+ CODEC_LOGV("stop mState=%d", mState);
Mutex::Autolock autoLock(mLock);
@@ -3601,7 +3601,6 @@ status_t OMXCodec::stop() {
mLeftOverBuffer = NULL;
}
- CODEC_LOGI("stopping video source");
mSource->stop();
CODEC_LOGI("stopped in state %d", mState);
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index e069b4d..95f2ae8 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -291,7 +291,7 @@ private:
void finishSeekIfNecessary(int64_t videoTimeUs);
void ensureCacheIsFetching_l();
- status_t startAudioPlayer_l();
+ status_t startAudioPlayer_l(bool sendErrorNotification = true);
void postAudioSeekComplete_l();
void shutdownVideoDecoder_l();