Diffstat (limited to 'media')
-rw-r--r--  media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp | 10
-rw-r--r--  media/libmedia/Android.mk | 3
-rw-r--r--  media/libmedia/AudioPolicy.cpp | 115
-rw-r--r--  media/libmedia/AudioRecord.cpp | 29
-rw-r--r--  media/libmedia/AudioSystem.cpp | 310
-rw-r--r--  media/libmedia/AudioTrack.cpp | 231
-rw-r--r--  media/libmedia/AudioTrackShared.cpp | 97
-rw-r--r--  media/libmedia/IAudioPolicyService.cpp | 234
-rw-r--r--  media/libmedia/ICrypto.cpp | 22
-rw-r--r--  media/libmedia/IDrm.cpp | 43
-rw-r--r--  media/libmedia/Visualizer.cpp | 19
-rw-r--r--  media/libmediaplayerservice/Crypto.cpp | 8
-rw-r--r--  media/libmediaplayerservice/Crypto.h | 2
-rw-r--r--  media/libmediaplayerservice/Drm.cpp | 34
-rw-r--r--  media/libmediaplayerservice/Drm.h | 2
-rw-r--r--  media/libmediaplayerservice/MediaPlayerFactory.cpp | 35
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.cpp | 5
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.cpp | 4
-rw-r--r--  media/libmediaplayerservice/nuplayer/Android.mk | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/GenericSource.cpp | 127
-rw-r--r--  media/libmediaplayerservice/nuplayer/GenericSource.h | 8
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp | 14
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.h | 3
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 804
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.h | 56
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp | 361
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h | 59
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp | 1183
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h | 124
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp | 200
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h | 97
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp | 443
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h | 72
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 60
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 163
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h | 23
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerSource.h | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 67
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.h | 4
-rw-r--r--  media/libmediaplayerservice/nuplayer/StreamingSource.cpp | 134
-rw-r--r--  media/libmediaplayerservice/nuplayer/StreamingSource.h | 16
-rw-r--r--  media/libstagefright/ACodec.cpp | 58
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 12
-rw-r--r--  media/libstagefright/CameraSource.cpp | 100
-rw-r--r--  media/libstagefright/MPEG4Writer.cpp | 5
-rw-r--r--  media/libstagefright/MediaCodec.cpp | 23
-rw-r--r--  media/libstagefright/MediaCodecSource.cpp | 268
-rw-r--r--  media/libstagefright/OMXCodec.cpp | 326
-rw-r--r--  media/libstagefright/OggExtractor.cpp | 75
-rw-r--r--  media/libstagefright/SurfaceMediaSource.cpp | 2
-rw-r--r--  media/libstagefright/avc_utils.cpp | 27
-rw-r--r--  media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 30
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp | 347
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h | 7
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.cpp | 11
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 14
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h | 2
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp | 357
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h | 14
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 12
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp | 323
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 38
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 6
-rw-r--r--  media/libstagefright/data/media_codecs_google_audio.xml | 2
-rw-r--r--  media/libstagefright/data/media_codecs_google_video.xml | 2
-rw-r--r--  media/libstagefright/foundation/ADebug.cpp | 117
-rw-r--r--  media/libstagefright/foundation/AStringUtils.cpp | 77
-rw-r--r--  media/libstagefright/foundation/Android.mk | 3
-rw-r--r--  media/libstagefright/httplive/LiveSession.cpp | 10
-rw-r--r--  media/libstagefright/httplive/LiveSession.h | 2
-rw-r--r--  media/libstagefright/httplive/M3UParser.cpp | 35
-rw-r--r--  media/libstagefright/httplive/M3UParser.h | 2
-rw-r--r--  media/libstagefright/httplive/PlaylistFetcher.cpp | 93
-rw-r--r--  media/libstagefright/include/OMXNodeInstance.h | 18
-rw-r--r--  media/libstagefright/include/SoftVideoDecoderOMXComponent.h | 8
-rw-r--r--  media/libstagefright/include/SoftVideoEncoderOMXComponent.h | 38
-rw-r--r--  media/libstagefright/matroska/MatroskaExtractor.cpp | 9
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.cpp | 65
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.h | 3
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.cpp | 6
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.cpp | 22
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.cpp | 2
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.h | 2
-rw-r--r--  media/libstagefright/omx/OMX.cpp | 2
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp | 570
-rw-r--r--  media/libstagefright/omx/SimpleSoftOMXComponent.cpp | 24
-rw-r--r--  media/libstagefright/omx/SoftOMXComponent.cpp | 2
-rw-r--r--  media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp | 86
-rw-r--r--  media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp | 307
-rw-r--r--  media/libstagefright/tests/Utils_test.cpp | 102
-rw-r--r--  media/mtp/MtpDataPacket.cpp | 160
-rw-r--r--  media/mtp/MtpDataPacket.h | 25
-rw-r--r--  media/mtp/MtpDevice.cpp | 32
-rw-r--r--  media/mtp/MtpDevice.h | 2
-rw-r--r--  media/mtp/MtpDeviceInfo.cpp | 33
-rw-r--r--  media/mtp/MtpDeviceInfo.h | 4
-rw-r--r--  media/mtp/MtpObjectInfo.cpp | 42
-rw-r--r--  media/mtp/MtpObjectInfo.h | 2
-rw-r--r--  media/mtp/MtpPacket.cpp | 2
-rw-r--r--  media/mtp/MtpPacket.h | 8
-rw-r--r--  media/mtp/MtpProperty.cpp | 93
-rw-r--r--  media/mtp/MtpProperty.h | 14
-rw-r--r--  media/mtp/MtpRequestPacket.cpp | 20
-rw-r--r--  media/mtp/MtpRequestPacket.h | 4
-rw-r--r--  media/mtp/MtpServer.cpp | 97
-rw-r--r--  media/mtp/MtpStorageInfo.cpp | 20
-rw-r--r--  media/mtp/MtpStorageInfo.h | 2
-rw-r--r--  media/mtp/MtpStringBuffer.cpp | 13
-rw-r--r--  media/mtp/MtpStringBuffer.h | 2
-rw-r--r--  media/ndk/NdkMediaExtractor.cpp | 27
110 files changed, 5442 insertions, 4051 deletions
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 6c2cbe3..3ddeb4e 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -1236,10 +1236,12 @@ int VirtualizerForceVirtualizationMode(EffectContext *pContext, audio_devices_t
bool useVirtualizer = false;
if (VirtualizerIsDeviceSupported(forcedDevice) != 0) {
- // forced device is not supported, make it behave as a reset of forced mode
- forcedDevice = AUDIO_DEVICE_NONE;
- // but return an error
- status = -EINVAL;
+ if (forcedDevice != AUDIO_DEVICE_NONE) {
+ //forced device is not supported, make it behave as a reset of forced mode
+ forcedDevice = AUDIO_DEVICE_NONE;
+ // but return an error
+ status = -EINVAL;
+ }
}
if (forcedDevice == AUDIO_DEVICE_NONE) {
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 50d9a0f..bcec9c9 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -59,7 +59,8 @@ LOCAL_SRC_FILES:= \
MemoryLeakTrackUtil.cpp \
SoundPool.cpp \
SoundPoolThread.cpp \
- StringArray.cpp
+ StringArray.cpp \
+ AudioPolicy.cpp
LOCAL_SRC_FILES += ../libnbaio/roundup.c
diff --git a/media/libmedia/AudioPolicy.cpp b/media/libmedia/AudioPolicy.cpp
new file mode 100644
index 0000000..d2d0971
--- /dev/null
+++ b/media/libmedia/AudioPolicy.cpp
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioPolicy"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+#include <media/AudioPolicy.h>
+
+namespace android {
+
+//
+// AttributeMatchCriterion implementation
+//
+AttributeMatchCriterion::AttributeMatchCriterion(audio_usage_t usage,
+ audio_source_t source,
+ uint32_t rule)
+: mRule(rule)
+{
+ if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
+ mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
+ mAttr.mUsage = usage;
+ } else {
+ mAttr.mSource = source;
+ }
+}
+
+status_t AttributeMatchCriterion::readFromParcel(Parcel *parcel)
+{
+ mRule = parcel->readInt32();
+ if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
+ mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
+ mAttr.mUsage = (audio_usage_t)parcel->readInt32();
+ } else {
+ mAttr.mSource = (audio_source_t)parcel->readInt32();
+ }
+ return NO_ERROR;
+}
+
+status_t AttributeMatchCriterion::writeToParcel(Parcel *parcel) const
+{
+ parcel->writeInt32(mRule);
+ parcel->writeInt32(mAttr.mUsage);
+ return NO_ERROR;
+}
+
+//
+// AudioMix implementation
+//
+
+status_t AudioMix::readFromParcel(Parcel *parcel)
+{
+ mMixType = parcel->readInt32();
+ mFormat.sample_rate = (uint32_t)parcel->readInt32();
+ mFormat.channel_mask = (audio_channel_mask_t)parcel->readInt32();
+ mFormat.format = (audio_format_t)parcel->readInt32();
+ mRouteFlags = parcel->readInt32();
+ mRegistrationId = parcel->readString8();
+ size_t size = (size_t)parcel->readInt32();
+ if (size > MAX_CRITERIA_PER_MIX) {
+ size = MAX_CRITERIA_PER_MIX;
+ }
+ for (size_t i = 0; i < size; i++) {
+ AttributeMatchCriterion criterion;
+ if (criterion.readFromParcel(parcel) == NO_ERROR) {
+ mCriteria.add(criterion);
+ }
+ }
+ return NO_ERROR;
+}
+
+status_t AudioMix::writeToParcel(Parcel *parcel) const
+{
+ parcel->writeInt32(mMixType);
+ parcel->writeInt32(mFormat.sample_rate);
+ parcel->writeInt32(mFormat.channel_mask);
+ parcel->writeInt32(mFormat.format);
+ parcel->writeInt32(mRouteFlags);
+ parcel->writeString8(mRegistrationId);
+ size_t size = mCriteria.size();
+ if (size > MAX_CRITERIA_PER_MIX) {
+ size = MAX_CRITERIA_PER_MIX;
+ }
+ size_t sizePosition = parcel->dataPosition();
+ parcel->writeInt32(size);
+ size_t finalSize = size;
+ for (size_t i = 0; i < size; i++) {
+ size_t position = parcel->dataPosition();
+ if (mCriteria[i].writeToParcel(parcel) != NO_ERROR) {
+ parcel->setDataPosition(position);
+ finalSize--;
+ }
+ }
+ if (size != finalSize) {
+ size_t position = parcel->dataPosition();
+ parcel->setDataPosition(sizePosition);
+ parcel->writeInt32(finalSize);
+ parcel->setDataPosition(position);
+ }
+ return NO_ERROR;
+}
+
+}; // namespace android
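
Note: AudioMix::writeToParcel() above uses a count back-patching pattern: it remembers the Parcel position of the element count, writes a provisional value, serializes each criterion, rolls back any criterion that fails, and finally rewrites the count slot with the number that actually landed in the Parcel. The following is a minimal standalone sketch of the same idea using a plain byte buffer instead of android::Parcel; BlobWriter, Criterion, and the helper names are illustrative only, not AOSP types, and it is simplified in that it writes a zero placeholder and always patches, whereas the patch writes the intended count up front and only rewinds when an element fails.

    // Sketch: reserve a 32-bit slot for the element count, serialize each
    // element, drop any element that fails, then back-patch the slot with
    // the number of elements that were actually written.
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct BlobWriter {
        std::vector<uint8_t> buf;
        size_t dataPosition() const { return buf.size(); }
        void writeInt32(int32_t v) {
            const uint8_t* p = reinterpret_cast<const uint8_t*>(&v);
            buf.insert(buf.end(), p, p + sizeof(v));
        }
        void patchInt32At(size_t pos, int32_t v) {
            std::memcpy(buf.data() + pos, &v, sizeof(v));
        }
    };

    struct Criterion {
        int32_t rule;
        int32_t value;
        bool writeTo(BlobWriter& w) const {   // may fail in the real code
            w.writeInt32(rule);
            w.writeInt32(value);
            return true;
        }
    };

    void writeCriteria(BlobWriter& w, const std::vector<Criterion>& criteria) {
        size_t sizePosition = w.dataPosition();   // remember where the count lives
        int32_t finalSize = 0;
        w.writeInt32(0);                          // provisional count
        for (const Criterion& c : criteria) {
            size_t position = w.dataPosition();
            if (c.writeTo(w)) {
                ++finalSize;
            } else {
                w.buf.resize(position);           // roll back a failed element
            }
        }
        w.patchInt32At(sizePosition, finalSize);  // back-patch the real count
    }

    int main() {
        BlobWriter w;
        std::vector<Criterion> criteria = {{1, 42}, {2, 7}};
        writeCriteria(w, criteria);
        return 0;
    }

The same pattern reappears in IAudioPolicyService's registerPolicyMixes() proxy later in this change.
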
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 9d92cfe..8ad6c8b 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -82,14 +82,16 @@ AudioRecord::AudioRecord(
uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ const audio_attributes_t* pAttributes)
: mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
mProxy(NULL)
{
mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
- notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags);
+ notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
+ pAttributes);
}
AudioRecord::~AudioRecord()
@@ -126,7 +128,8 @@ status_t AudioRecord::set(
bool threadCanCallJava,
int sessionId,
transfer_type transferType,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ const audio_attributes_t* pAttributes)
{
ALOGV("set(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
"notificationFrames %u, sessionId %d, transferType %d, flags %#x",
@@ -168,7 +171,15 @@ status_t AudioRecord::set(
if (inputSource == AUDIO_SOURCE_DEFAULT) {
inputSource = AUDIO_SOURCE_MIC;
}
- mInputSource = inputSource;
+ if (pAttributes == NULL) {
+ memset(&mAttributes, 0, sizeof(audio_attributes_t));
+ mAttributes.source = inputSource;
+ } else {
+ // stream type shouldn't be looked at, this track has audio attributes
+ memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
+ ALOGV("Building AudioRecord with attributes: source=%d flags=0x%x tags=[%s]",
+ mAttributes.source, mAttributes.flags, mAttributes.tags);
+ }
if (sampleRate == 0) {
ALOGE("Invalid sample rate %u", sampleRate);
@@ -444,12 +455,14 @@ status_t AudioRecord::openRecord_l(size_t epoch)
}
}
- audio_io_handle_t input = AudioSystem::getInput(mInputSource, mSampleRate, mFormat,
- mChannelMask, mSessionId, mFlags);
- if (input == AUDIO_IO_HANDLE_NONE) {
+ audio_io_handle_t input;
+ status = AudioSystem::getInputForAttr(&mAttributes, &input, (audio_session_t)mSessionId,
+ mSampleRate, mFormat, mChannelMask, mFlags);
+
+ if (status != NO_ERROR) {
ALOGE("Could not get audio input for record source %d, sample rate %u, format %#x, "
"channel mask %#x, session %d, flags %#x",
- mInputSource, mSampleRate, mFormat, mChannelMask, mSessionId, mFlags);
+ mAttributes.source, mSampleRate, mFormat, mChannelMask, mSessionId, mFlags);
return BAD_VALUE;
}
{
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index dda3657..9cae21c 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -32,6 +32,9 @@ namespace android {
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
+Mutex AudioSystem::gLockCache;
+Mutex AudioSystem::gLockAPS;
+Mutex AudioSystem::gLockAPC;
sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
@@ -48,33 +51,40 @@ size_t AudioSystem::gInBuffSize = 0; // zero indicates cache is invalid
sp<AudioSystem::AudioPortCallback> AudioSystem::gAudioPortCallback;
// establish binder interface to AudioFlinger service
-const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
-{
- Mutex::Autolock _l(gLock);
- if (gAudioFlinger == 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.audio_flinger"));
- if (binder != 0)
- break;
- ALOGW("AudioFlinger not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
- if (gAudioFlingerClient == NULL) {
- gAudioFlingerClient = new AudioFlingerClient();
- } else {
- if (gAudioErrorCallback) {
- gAudioErrorCallback(NO_ERROR);
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger()
+{
+ sp<IAudioFlinger> af;
+ sp<AudioFlingerClient> afc;
+ {
+ Mutex::Autolock _l(gLock);
+ if (gAudioFlinger == 0) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder;
+ do {
+ binder = sm->getService(String16("media.audio_flinger"));
+ if (binder != 0)
+ break;
+ ALOGW("AudioFlinger not published, waiting...");
+ usleep(500000); // 0.5 s
+ } while (true);
+ if (gAudioFlingerClient == NULL) {
+ gAudioFlingerClient = new AudioFlingerClient();
+ } else {
+ if (gAudioErrorCallback) {
+ gAudioErrorCallback(NO_ERROR);
+ }
}
+ binder->linkToDeath(gAudioFlingerClient);
+ gAudioFlinger = interface_cast<IAudioFlinger>(binder);
+ LOG_ALWAYS_FATAL_IF(gAudioFlinger == 0);
+ afc = gAudioFlingerClient;
}
- binder->linkToDeath(gAudioFlingerClient);
- gAudioFlinger = interface_cast<IAudioFlinger>(binder);
- gAudioFlinger->registerClient(gAudioFlingerClient);
+ af = gAudioFlinger;
}
- ALOGE_IF(gAudioFlinger==0, "no AudioFlinger!?");
-
- return gAudioFlinger;
+ if (afc != 0) {
+ af->registerClient(afc);
+ }
+ return af;
}
/* static */ status_t AudioSystem::checkAudioFlinger()
@@ -245,36 +255,23 @@ status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream
return getSamplingRate(output, samplingRate);
}
-status_t AudioSystem::getOutputSamplingRateForAttr(uint32_t* samplingRate,
- const audio_attributes_t *attr)
-{
- if (attr == NULL) {
- return BAD_VALUE;
- }
- audio_io_handle_t output = getOutputForAttr(attr);
- if (output == 0) {
- return PERMISSION_DENIED;
- }
- return getSamplingRate(output, samplingRate);
-}
-
status_t AudioSystem::getSamplingRate(audio_io_handle_t output,
uint32_t* samplingRate)
{
- OutputDescriptor *outputDesc;
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
- gLock.lock();
- outputDesc = AudioSystem::gOutputs.valueFor(output);
+ Mutex::Autolock _l(gLockCache);
+
+ OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == NULL) {
ALOGV("getOutputSamplingRate() no output descriptor for output %d in gOutputs", output);
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ gLockCache.unlock();
*samplingRate = af->sampleRate(output);
+ gLockCache.lock();
} else {
ALOGV("getOutputSamplingRate() reading from output desc");
*samplingRate = outputDesc->samplingRate;
- gLock.unlock();
}
if (*samplingRate == 0) {
ALOGE("AudioSystem::getSamplingRate failed for output %d", output);
@@ -305,18 +302,18 @@ status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_
status_t AudioSystem::getFrameCount(audio_io_handle_t output,
size_t* frameCount)
{
- OutputDescriptor *outputDesc;
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+
+ Mutex::Autolock _l(gLockCache);
- gLock.lock();
- outputDesc = AudioSystem::gOutputs.valueFor(output);
+ OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == NULL) {
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ gLockCache.unlock();
*frameCount = af->frameCount(output);
+ gLockCache.lock();
} else {
*frameCount = outputDesc->frameCount;
- gLock.unlock();
}
if (*frameCount == 0) {
ALOGE("AudioSystem::getFrameCount failed for output %d", output);
@@ -347,18 +344,18 @@ status_t AudioSystem::getOutputLatency(uint32_t* latency, audio_stream_type_t st
status_t AudioSystem::getLatency(audio_io_handle_t output,
uint32_t* latency)
{
- OutputDescriptor *outputDesc;
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
- gLock.lock();
- outputDesc = AudioSystem::gOutputs.valueFor(output);
+ Mutex::Autolock _l(gLockCache);
+
+ OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == NULL) {
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ gLockCache.unlock();
*latency = af->latency(output);
+ gLockCache.lock();
} else {
*latency = outputDesc->latency;
- gLock.unlock();
}
ALOGV("getLatency() output %d, latency %d", output, *latency);
@@ -369,24 +366,24 @@ status_t AudioSystem::getLatency(audio_io_handle_t output,
status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize)
{
- gLock.lock();
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) {
+ return PERMISSION_DENIED;
+ }
+ Mutex::Autolock _l(gLockCache);
// Do we have a stale gInBufferSize or are we requesting the input buffer size for new values
size_t inBuffSize = gInBuffSize;
if ((inBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat)
|| (channelMask != gPrevInChannelMask)) {
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) {
- return PERMISSION_DENIED;
- }
+ gLockCache.unlock();
inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
+ gLockCache.lock();
if (inBuffSize == 0) {
ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x",
sampleRate, format, channelMask);
return BAD_VALUE;
}
// A benign race is possible here: we could overwrite a fresher cache entry
- gLock.lock();
// save the request params
gPrevInSamplingRate = sampleRate;
gPrevInFormat = format;
@@ -394,7 +391,6 @@ status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t for
gInBuffSize = inBuffSize;
}
- gLock.unlock();
*buffSize = inBuffSize;
return NO_ERROR;
@@ -461,14 +457,21 @@ audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId)
void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
{
- Mutex::Autolock _l(AudioSystem::gLock);
+ audio_error_callback cb = NULL;
+ {
+ Mutex::Autolock _l(AudioSystem::gLock);
+ AudioSystem::gAudioFlinger.clear();
+ cb = gAudioErrorCallback;
+ }
- AudioSystem::gAudioFlinger.clear();
- // clear output handles and stream to output map caches
- AudioSystem::gOutputs.clear();
+ {
+ // clear output handles and stream to output map caches
+ Mutex::Autolock _l(gLockCache);
+ AudioSystem::gOutputs.clear();
+ }
- if (gAudioErrorCallback) {
- gAudioErrorCallback(DEAD_OBJECT);
+ if (cb) {
+ cb(DEAD_OBJECT);
}
ALOGW("AudioFlinger server died!");
}
@@ -481,7 +484,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
if (ioHandle == AUDIO_IO_HANDLE_NONE) return;
- Mutex::Autolock _l(AudioSystem::gLock);
+ Mutex::Autolock _l(AudioSystem::gLockCache);
switch (event) {
case STREAM_CONFIG_CHANGED:
@@ -543,55 +546,44 @@ void AudioSystem::setErrorCallback(audio_error_callback cb)
gAudioErrorCallback = cb;
}
-
-bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType)
-{
- switch (streamType) {
- case AUDIO_STREAM_MUSIC:
- case AUDIO_STREAM_VOICE_CALL:
- case AUDIO_STREAM_BLUETOOTH_SCO:
- case AUDIO_STREAM_SYSTEM:
- return true;
- default:
- return false;
- }
-}
-
-
// client singleton for AudioPolicyService binder interface
+// protected by gLockAPS
sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
// establish binder interface to AudioPolicy service
-const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
-{
- gLock.lock();
- if (gAudioPolicyService == 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.audio_policy"));
- if (binder != 0)
- break;
- ALOGW("AudioPolicyService not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
- if (gAudioPolicyServiceClient == NULL) {
- gAudioPolicyServiceClient = new AudioPolicyServiceClient();
+const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service()
+{
+ sp<IAudioPolicyService> ap;
+ sp<AudioPolicyServiceClient> apc;
+ {
+ Mutex::Autolock _l(gLockAPS);
+ if (gAudioPolicyService == 0) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder;
+ do {
+ binder = sm->getService(String16("media.audio_policy"));
+ if (binder != 0)
+ break;
+ ALOGW("AudioPolicyService not published, waiting...");
+ usleep(500000); // 0.5 s
+ } while (true);
+ if (gAudioPolicyServiceClient == NULL) {
+ gAudioPolicyServiceClient = new AudioPolicyServiceClient();
+ }
+ binder->linkToDeath(gAudioPolicyServiceClient);
+ gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
+ LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
+ apc = gAudioPolicyServiceClient;
}
- binder->linkToDeath(gAudioPolicyServiceClient);
- gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
- gLock.unlock();
- // Registering the client takes the AudioPolicyService lock.
- // Don't hold the AudioSystem lock at the same time.
- gAudioPolicyService->registerClient(gAudioPolicyServiceClient);
- } else {
- // There exists a benign race condition where gAudioPolicyService
- // is set, but gAudioPolicyServiceClient is not yet registered.
- gLock.unlock();
+ ap = gAudioPolicyService;
+ }
+ if (apc != 0) {
+ ap->registerClient(apc);
}
- return gAudioPolicyService;
+
+ return ap;
}
// ---------------------------------------------------------------------------
@@ -657,22 +649,26 @@ audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream,
return aps->getOutput(stream, samplingRate, format, channelMask, flags, offloadInfo);
}
-audio_io_handle_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,
- uint32_t samplingRate,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo)
+status_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *output,
+ audio_session_t session,
+ audio_stream_type_t *stream,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ const audio_offload_info_t *offloadInfo)
{
- if (attr == NULL) return 0;
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return 0;
- return aps->getOutputForAttr(attr, samplingRate, format, channelMask, flags, offloadInfo);
+ if (aps == 0) return NO_INIT;
+ return aps->getOutputForAttr(attr, output, session, stream,
+ samplingRate, format, channelMask,
+ flags, offloadInfo);
}
status_t AudioSystem::startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -681,30 +677,33 @@ status_t AudioSystem::startOutput(audio_io_handle_t output,
status_t AudioSystem::stopOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return aps->stopOutput(output, stream, session);
}
-void AudioSystem::releaseOutput(audio_io_handle_t output)
+void AudioSystem::releaseOutput(audio_io_handle_t output,
+ audio_stream_type_t stream,
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return;
- aps->releaseOutput(output);
+ aps->releaseOutput(output, stream, session);
}
-audio_io_handle_t AudioSystem::getInput(audio_source_t inputSource,
- uint32_t samplingRate,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- int sessionId,
- audio_input_flags_t flags)
+status_t AudioSystem::getInputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *input,
+ audio_session_t session,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_input_flags_t flags)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return 0;
- return aps->getInput(inputSource, samplingRate, format, channelMask, sessionId, flags);
+ if (aps == 0) return NO_INIT;
+ return aps->getInputForAttr(attr, input, session, samplingRate, format, channelMask, flags);
}
status_t AudioSystem::startInput(audio_io_handle_t input,
@@ -856,9 +855,21 @@ status_t AudioSystem::setLowRamDevice(bool isLowRamDevice)
void AudioSystem::clearAudioConfigCache()
{
- Mutex::Autolock _l(gLock);
+ // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
ALOGV("clearAudioConfigCache()");
- gOutputs.clear();
+ {
+ Mutex::Autolock _l(gLockCache);
+ gOutputs.clear();
+ }
+ {
+ Mutex::Autolock _l(gLock);
+ gAudioFlinger.clear();
+ }
+ {
+ Mutex::Autolock _l(gLockAPS);
+ gAudioPolicyService.clear();
+ }
+ // Do not clear gAudioPortCallback
}
bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
@@ -920,7 +931,7 @@ status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config)
void AudioSystem::setAudioPortCallback(sp<AudioPortCallback> callBack)
{
- Mutex::Autolock _l(gLock);
+ Mutex::Autolock _l(gLockAPC);
gAudioPortCallback = callBack;
}
@@ -947,23 +958,34 @@ audio_mode_t AudioSystem::getPhoneState()
return aps->getPhoneState();
}
+status_t AudioSystem::registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->registerPolicyMixes(mixes, registration);
+}
// ---------------------------------------------------------------------------
void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
{
- Mutex::Autolock _l(gLock);
- if (gAudioPortCallback != 0) {
- gAudioPortCallback->onServiceDied();
+ {
+ Mutex::Autolock _l(gLockAPC);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onServiceDied();
+ }
+ }
+ {
+ Mutex::Autolock _l(gLockAPS);
+ AudioSystem::gAudioPolicyService.clear();
}
- AudioSystem::gAudioPolicyService.clear();
ALOGW("AudioPolicyService server died!");
}
void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
{
- Mutex::Autolock _l(gLock);
+ Mutex::Autolock _l(gLockAPC);
if (gAudioPortCallback != 0) {
gAudioPortCallback->onAudioPortListUpdate();
}
@@ -971,7 +993,7 @@ void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
{
- Mutex::Autolock _l(gLock);
+ Mutex::Autolock _l(gLockAPC);
if (gAudioPortCallback != 0) {
gAudioPortCallback->onAudioPatchListUpdate();
}
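
Note: the rewritten get_audio_flinger() and get_audio_policy_service() above resolve or create the binder proxy while holding the local singleton lock, but deliberately call registerClient() only after releasing it, since that outbound call takes the remote service's own lock and may re-enter AudioSystem. Below is a standalone sketch of that "resolve under lock, call out of the lock" shape; Service, Client, and ServiceCache are placeholder types, not AOSP classes.

    #include <memory>
    #include <mutex>

    struct Client { /* death notifications, config callbacks */ };
    struct Service {
        void registerClient(const std::shared_ptr<Client>&) { /* may call back in */ }
    };

    class ServiceCache {
        std::mutex mLock;
        std::shared_ptr<Service> mService;
        std::shared_ptr<Client> mClient;

    public:
        std::shared_ptr<Service> get() {
            std::shared_ptr<Service> svc;
            std::shared_ptr<Client> newClient;   // non-null only on first creation
            {
                std::lock_guard<std::mutex> l(mLock);
                if (!mService) {
                    mService = std::make_shared<Service>();
                    mClient = std::make_shared<Client>();
                    newClient = mClient;         // register it after unlocking
                }
                svc = mService;
            }
            if (newClient) {
                svc->registerClient(newClient);  // remote call, no local lock held
            }
            return svc;
        }
    };

    int main() {
        ServiceCache cache;
        auto svc = cache.get();
        (void)svc;
        return 0;
    }

Splitting gLock into gLockCache, gLockAPS, and gLockAPC in this file serves the same goal: no callback path ever needs a lock that the caller still holds.
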
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index b18a528..6bdf865 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -28,6 +28,7 @@
#include <utils/Log.h>
#include <private/media/AudioTrackShared.h>
#include <media/IAudioFlinger.h>
+#include <media/AudioPolicyHelper.h>
#include <media/AudioResamplerPublic.h>
#define WAIT_PERIOD_MS 10
@@ -281,38 +282,21 @@ status_t AudioTrack::set(
if (streamType == AUDIO_STREAM_DEFAULT) {
streamType = AUDIO_STREAM_MUSIC;
}
-
if (pAttributes == NULL) {
- if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
+ if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
ALOGE("Invalid stream type %d", streamType);
return BAD_VALUE;
}
- setAttributesFromStreamType(streamType);
mStreamType = streamType;
+
} else {
- if (!isValidAttributes(pAttributes)) {
- ALOGE("Invalid attributes: usage=%d content=%d flags=0x%x tags=[%s]",
- pAttributes->usage, pAttributes->content_type, pAttributes->flags,
- pAttributes->tags);
- }
// stream type shouldn't be looked at, this track has audio attributes
memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
- setStreamTypeFromAttributes(mAttributes);
ALOGV("Building AudioTrack with attributes: usage=%d content=%d flags=0x%x tags=[%s]",
mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);
+ mStreamType = AUDIO_STREAM_DEFAULT;
}
- status_t status;
- if (sampleRate == 0) {
- status = AudioSystem::getOutputSamplingRateForAttr(&sampleRate, &mAttributes);
- if (status != NO_ERROR) {
- ALOGE("Could not get output sample rate for stream type %d; status %d",
- mStreamType, status);
- return status;
- }
- }
- mSampleRate = sampleRate;
-
// these below should probably come from the audioFlinger too...
if (format == AUDIO_FORMAT_DEFAULT) {
format = AUDIO_FORMAT_PCM_16_BIT;
@@ -350,10 +334,6 @@ status_t AudioTrack::set(
// FIXME why can't we allow direct AND fast?
((flags | AUDIO_OUTPUT_FLAG_DIRECT) & ~AUDIO_OUTPUT_FLAG_FAST);
}
- // only allow deep buffering for music stream type
- if (mStreamType != AUDIO_STREAM_MUSIC) {
- flags = (audio_output_flags_t)(flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
- }
if (flags & AUDIO_OUTPUT_FLAG_DIRECT) {
if (audio_is_linear_pcm(format)) {
@@ -371,6 +351,12 @@ status_t AudioTrack::set(
// so no need to check for specific PCM formats here
}
+ // sampling rate must be specified for direct outputs
+ if (sampleRate == 0 && (flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
+ return BAD_VALUE;
+ }
+ mSampleRate = sampleRate;
+
// Make copy of input parameter offloadInfo so that in the future:
// (a) createTrack_l doesn't need it as an input parameter
// (b) we can support re-creation of offloaded tracks
@@ -388,7 +374,11 @@ status_t AudioTrack::set(
mReqFrameCount = frameCount;
mNotificationFramesReq = notificationFrames;
mNotificationFramesAct = 0;
- mSessionId = sessionId;
+ if (sessionId == AUDIO_SESSION_ALLOCATE) {
+ mSessionId = AudioSystem::newAudioUniqueId();
+ } else {
+ mSessionId = sessionId;
+ }
int callingpid = IPCThreadState::self()->getCallingPid();
int mypid = getpid();
if (uid == -1 || (callingpid != mypid)) {
@@ -411,7 +401,7 @@ status_t AudioTrack::set(
}
// create the IAudioTrack
- status = createTrack_l();
+ status_t status = createTrack_l();
if (status != NO_ERROR) {
if (mAudioTrackThread != 0) {
@@ -678,15 +668,18 @@ status_t AudioTrack::setSampleRate(uint32_t rate)
return INVALID_OPERATION;
}
+ AutoMutex lock(mLock);
+ if (mOutput == AUDIO_IO_HANDLE_NONE) {
+ return NO_INIT;
+ }
uint32_t afSamplingRate;
- if (AudioSystem::getOutputSamplingRateForAttr(&afSamplingRate, &mAttributes) != NO_ERROR) {
+ if (AudioSystem::getSamplingRate(mOutput, &afSamplingRate) != NO_ERROR) {
return NO_INIT;
}
if (rate == 0 || rate > afSamplingRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) {
return BAD_VALUE;
}
- AutoMutex lock(mLock);
mSampleRate = rate;
mProxy->setSampleRate(rate);
@@ -742,8 +735,7 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount
void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)
{
- // FIXME If setting a loop also sets position to start of loop, then
- // this is correct. Otherwise it should be removed.
+ // Setting the loop will reset next notification update period (like setPosition).
mNewPosition = updateAndGetPosition_l() + mUpdatePeriod;
mLoopPeriod = loopCount != 0 ? loopEnd - loopStart : 0;
mStaticProxy->setLoop(loopStart, loopEnd, loopCount);
@@ -859,6 +851,10 @@ status_t AudioTrack::getPosition(uint32_t *position)
// due to hardware latency. We leave this behavior for now.
*position = dspFrames;
} else {
+ if (mCblk->mFlags & CBLK_INVALID) {
+ restoreTrack_l("getPosition");
+ }
+
// IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
*position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ?
0 : updateAndGetPosition_l();
@@ -915,24 +911,38 @@ status_t AudioTrack::attachAuxEffect(int effectId)
return status;
}
+audio_stream_type_t AudioTrack::streamType() const
+{
+ if (mStreamType == AUDIO_STREAM_DEFAULT) {
+ return audio_attributes_to_stream_type(&mAttributes);
+ }
+ return mStreamType;
+}
+
// -------------------------------------------------------------------------
// must be called with mLock held
status_t AudioTrack::createTrack_l()
{
- status_t status;
const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
if (audioFlinger == 0) {
ALOGE("Could not get audioflinger");
return NO_INIT;
}
- audio_io_handle_t output = AudioSystem::getOutputForAttr(&mAttributes, mSampleRate, mFormat,
- mChannelMask, mFlags, mOffloadInfo);
- if (output == AUDIO_IO_HANDLE_NONE) {
+ audio_io_handle_t output;
+ audio_stream_type_t streamType = mStreamType;
+ audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? &mAttributes : NULL;
+ status_t status = AudioSystem::getOutputForAttr(attr, &output,
+ (audio_session_t)mSessionId, &streamType,
+ mSampleRate, mFormat, mChannelMask,
+ mFlags, mOffloadInfo);
+
+
+ if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) {
ALOGE("Could not get audio output for stream type %d, usage %d, sample rate %u, format %#x,"
" channel mask %#x, flags %#x",
- mStreamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);
+ streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);
return BAD_VALUE;
}
{
@@ -961,7 +971,9 @@ status_t AudioTrack::createTrack_l()
ALOGE("getSamplingRate(output=%d) status %d", output, status);
goto release;
}
-
+ if (mSampleRate == 0) {
+ mSampleRate = afSampleRate;
+ }
// Client decides whether the track is TIMED (see below), but can only express a preference
// for FAST. Server will perform additional tests.
if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) && !((
@@ -1084,7 +1096,7 @@ status_t AudioTrack::createTrack_l()
size_t temp = frameCount; // temp may be replaced by a revised value of frameCount,
// but we will still need the original value also
- sp<IAudioTrack> track = audioFlinger->createTrack(mStreamType,
+ sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
mSampleRate,
// AudioFlinger only sees 16-bit PCM
mFormat == AUDIO_FORMAT_PCM_8_BIT &&
@@ -1234,7 +1246,7 @@ status_t AudioTrack::createTrack_l()
}
release:
- AudioSystem::releaseOutput(output);
+ AudioSystem::releaseOutput(output, streamType, (audio_session_t)mSessionId);
if (status == NO_ERROR) {
status = NO_INIT;
}
@@ -1830,7 +1842,7 @@ status_t AudioTrack::restoreTrack_l(const char *from)
status_t result;
// refresh the audio configuration cache in this process to make sure we get new
- // output parameters in createTrack_l()
+ // output parameters and new IAudioFlinger in createTrack_l()
AudioSystem::clearAudioConfigCache();
if (isOffloadedOrDirect_l()) {
@@ -1938,6 +1950,10 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)
break;
}
+ if (mCblk->mFlags & CBLK_INVALID) {
+ restoreTrack_l("getTimestamp");
+ }
+
// The presented frame count must always lag behind the consumed frame count.
// To avoid a race, read the presented frames first. This ensures that presented <= consumed.
status_t status = mAudioTrack->getTimestamp(timestamp);
@@ -2068,143 +2084,6 @@ uint32_t AudioTrack::getUnderrunFrames() const
return mProxy->getUnderrunFrames();
}
-void AudioTrack::setAttributesFromStreamType(audio_stream_type_t streamType) {
- mAttributes.flags = 0x0;
-
- switch(streamType) {
- case AUDIO_STREAM_DEFAULT:
- case AUDIO_STREAM_MUSIC:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
- mAttributes.usage = AUDIO_USAGE_MEDIA;
- break;
- case AUDIO_STREAM_VOICE_CALL:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
- mAttributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
- break;
- case AUDIO_STREAM_ENFORCED_AUDIBLE:
- mAttributes.flags |= AUDIO_FLAG_AUDIBILITY_ENFORCED;
- // intended fall through, attributes in common with STREAM_SYSTEM
- case AUDIO_STREAM_SYSTEM:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
- mAttributes.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
- break;
- case AUDIO_STREAM_RING:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
- mAttributes.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
- break;
- case AUDIO_STREAM_ALARM:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
- mAttributes.usage = AUDIO_USAGE_ALARM;
- break;
- case AUDIO_STREAM_NOTIFICATION:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
- mAttributes.usage = AUDIO_USAGE_NOTIFICATION;
- break;
- case AUDIO_STREAM_BLUETOOTH_SCO:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
- mAttributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
- mAttributes.flags |= AUDIO_FLAG_SCO;
- break;
- case AUDIO_STREAM_DTMF:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
- mAttributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
- break;
- case AUDIO_STREAM_TTS:
- mAttributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
- mAttributes.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
- break;
- default:
- ALOGE("invalid stream type %d when converting to attributes", streamType);
- }
-}
-
-void AudioTrack::setStreamTypeFromAttributes(audio_attributes_t& aa) {
- // flags to stream type mapping
- if ((aa.flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) == AUDIO_FLAG_AUDIBILITY_ENFORCED) {
- mStreamType = AUDIO_STREAM_ENFORCED_AUDIBLE;
- return;
- }
- if ((aa.flags & AUDIO_FLAG_SCO) == AUDIO_FLAG_SCO) {
- mStreamType = AUDIO_STREAM_BLUETOOTH_SCO;
- return;
- }
-
- // usage to stream type mapping
- switch (aa.usage) {
- case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
- // TODO once AudioPolicyManager fully supports audio_attributes_t,
- // remove stream change based on phone state
- if (AudioSystem::getPhoneState() == AUDIO_MODE_RINGTONE) {
- mStreamType = AUDIO_STREAM_RING;
- break;
- }
- /// FALL THROUGH
- case AUDIO_USAGE_MEDIA:
- case AUDIO_USAGE_GAME:
- case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
- mStreamType = AUDIO_STREAM_MUSIC;
- return;
- case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
- mStreamType = AUDIO_STREAM_SYSTEM;
- return;
- case AUDIO_USAGE_VOICE_COMMUNICATION:
- mStreamType = AUDIO_STREAM_VOICE_CALL;
- return;
-
- case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
- mStreamType = AUDIO_STREAM_DTMF;
- return;
-
- case AUDIO_USAGE_ALARM:
- mStreamType = AUDIO_STREAM_ALARM;
- return;
- case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
- mStreamType = AUDIO_STREAM_RING;
- return;
-
- case AUDIO_USAGE_NOTIFICATION:
- case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
- case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
- case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
- case AUDIO_USAGE_NOTIFICATION_EVENT:
- mStreamType = AUDIO_STREAM_NOTIFICATION;
- return;
-
- case AUDIO_USAGE_UNKNOWN:
- default:
- mStreamType = AUDIO_STREAM_MUSIC;
- }
-}
-
-bool AudioTrack::isValidAttributes(const audio_attributes_t *paa) {
- // has flags that map to a strategy?
- if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO)) != 0) {
- return true;
- }
-
- // has known usage?
- switch (paa->usage) {
- case AUDIO_USAGE_UNKNOWN:
- case AUDIO_USAGE_MEDIA:
- case AUDIO_USAGE_VOICE_COMMUNICATION:
- case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
- case AUDIO_USAGE_ALARM:
- case AUDIO_USAGE_NOTIFICATION:
- case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
- case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
- case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
- case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
- case AUDIO_USAGE_NOTIFICATION_EVENT:
- case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
- case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
- case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
- case AUDIO_USAGE_GAME:
- break;
- default:
- return false;
- }
- return true;
-}
// =========================================================================
void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
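
Note: with the AudioTrack.cpp changes above, a sample rate of 0 is no longer resolved eagerly via getOutputSamplingRateForAttr(); it is rejected only for direct outputs and otherwise deferred until createTrack_l() knows the output's mixer rate (afSampleRate). A tiny sketch of that decision, with resolveSampleRate() and OUTPUT_FLAG_DIRECT as illustrative stand-ins rather than AOSP symbols:

    #include <cstdint>

    enum : uint32_t { OUTPUT_FLAG_DIRECT = 0x1 };

    // Returns the rate to use, or 0 to signal BAD_VALUE for direct outputs.
    uint32_t resolveSampleRate(uint32_t requested, uint32_t flags, uint32_t afSampleRate) {
        if (requested == 0 && (flags & OUTPUT_FLAG_DIRECT) != 0) {
            return 0;                   // direct outputs must specify a rate up front
        }
        return requested != 0 ? requested : afSampleRate;  // else fall back to the output's rate
    }
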
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index 52533f5..ff24475 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -25,6 +25,12 @@
namespace android {
+// used to clamp a value to size_t. TODO: move to another file.
+template <typename T>
+size_t clampToSize(T x) {
+ return x > SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
+}
+
audio_track_cblk_t::audio_track_cblk_t()
: mServer(0), mFutex(0), mMinimum(0),
mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0), mFlags(0)
@@ -350,7 +356,13 @@ size_t ClientProxy::getFramesFilled() {
void AudioTrackClientProxy::flush()
{
- mCblk->u.mStreaming.mFlush++;
+ // This works for mFrameCountP2 <= 2^30
+ size_t increment = mFrameCountP2 << 1;
+ size_t mask = increment - 1;
+ audio_track_cblk_t* cblk = mCblk;
+ int32_t newFlush = (cblk->u.mStreaming.mRear & mask) |
+ ((cblk->u.mStreaming.mFlush & ~mask) + increment);
+ android_atomic_release_store(newFlush, &cblk->u.mStreaming.mFlush);
}
bool AudioTrackClientProxy::clearStreamEndDone() {
@@ -493,7 +505,11 @@ void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int
newState.mLoopStart = (uint32_t) loopStart;
newState.mLoopEnd = (uint32_t) loopEnd;
newState.mLoopCount = loopCount;
- mBufferPosition = loopStart;
+ size_t bufferPosition;
+ if (loopCount == 0 || (bufferPosition = getBufferPosition()) >= loopEnd) {
+ bufferPosition = loopStart;
+ }
+ mBufferPosition = bufferPosition; // snapshot buffer position until loop is acknowledged.
(void) mMutator.push(newState);
}
@@ -538,17 +554,27 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear);
front = cblk->u.mStreaming.mFront;
if (flush != mFlush) {
- mFlush = flush;
// effectively obtain then release whatever is in the buffer
- android_atomic_release_store(rear, &cblk->u.mStreaming.mFront);
- if (front != rear) {
+ size_t mask = (mFrameCountP2 << 1) - 1;
+ int32_t newFront = (front & ~mask) | (flush & mask);
+ ssize_t filled = rear - newFront;
+ // Rather than shutting down on a corrupt flush, just treat it as a full flush
+ if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
+ ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, filled %d=%#x",
+ mFlush, flush, front, rear, mask, newFront, filled, filled);
+ newFront = rear;
+ }
+ mFlush = flush;
+ android_atomic_release_store(newFront, &cblk->u.mStreaming.mFront);
+ // There is no danger from a false positive, so err on the side of caution
+ if (true /*front != newFront*/) {
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
(void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
}
}
- front = rear;
+ front = newFront;
}
} else {
front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront);
@@ -670,6 +696,7 @@ size_t AudioTrackServerProxy::framesReady()
int32_t flush = cblk->u.mStreaming.mFlush;
if (flush != mFlush) {
+ // FIXME should return an accurate value, but over-estimate is better than under-estimate
return mFrameCount;
}
// the acquire might not be necessary since not doing a subsequent read
@@ -713,7 +740,8 @@ StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cbl
size_t frameCount, size_t frameSize)
: AudioTrackServerProxy(cblk, buffers, frameCount, frameSize),
mObserver(&cblk->u.mStatic.mSingleStateQueue), mPosition(0),
- mEnd(frameCount), mFramesReadyIsCalledByMultipleThreads(false)
+ mFramesReadySafe(frameCount), mFramesReady(frameCount),
+ mFramesReadyIsCalledByMultipleThreads(false)
{
mState.mLoopStart = 0;
mState.mLoopEnd = 0;
@@ -727,20 +755,11 @@ void StaticAudioTrackServerProxy::framesReadyIsCalledByMultipleThreads()
size_t StaticAudioTrackServerProxy::framesReady()
{
- // FIXME
- // This is racy if called by normal mixer thread,
- // as we're reading 2 independent variables without a lock.
- // Can't call mObserver.poll(), as we might be called from wrong thread.
- // If looping is enabled, should return a higher number (since includes non-contiguous).
- size_t position = mPosition;
+ // Can't call pollPosition() from multiple threads.
if (!mFramesReadyIsCalledByMultipleThreads) {
- ssize_t positionOrStatus = pollPosition();
- if (positionOrStatus >= 0) {
- position = (size_t) positionOrStatus;
- }
+ (void) pollPosition();
}
- size_t end = mEnd;
- return position < end ? end - position : 0;
+ return mFramesReadySafe;
}
ssize_t StaticAudioTrackServerProxy::pollPosition()
@@ -757,25 +776,37 @@ ssize_t StaticAudioTrackServerProxy::pollPosition()
}
// ignore loopEnd
mPosition = position = loopStart;
- mEnd = mFrameCount;
+ mFramesReady = mFrameCount - mPosition;
mState.mLoopCount = 0;
valid = true;
- } else {
+ } else if (state.mLoopCount >= -1) {
if (loopStart < loopEnd && loopEnd <= mFrameCount &&
loopEnd - loopStart >= MIN_LOOP) {
- if (!(loopStart <= position && position < loopEnd)) {
+ // If the current position is greater than the end of the loop
+ // we "wrap" to the loop start. This might cause an audible pop.
+ if (position >= loopEnd) {
mPosition = position = loopStart;
}
- mEnd = loopEnd;
+ if (state.mLoopCount == -1) {
+ mFramesReady = INT64_MAX;
+ } else {
+ // mFramesReady is 64 bits to handle the effective number of frames
+ // that the static audio track contains, including loops.
+ // TODO: Later consider fixing overflow, but does not seem needed now
+ // as will not overflow if loopStart and loopEnd are Java "ints".
+ mFramesReady = int64_t(state.mLoopCount) * (loopEnd - loopStart)
+ + mFrameCount - mPosition;
+ }
mState = state;
valid = true;
}
}
- if (!valid) {
+ if (!valid || mPosition > mFrameCount) {
ALOGE("%s client pushed an invalid state, shutting down", __func__);
mIsShutdown = true;
return (ssize_t) NO_INIT;
}
+ mFramesReadySafe = clampToSize(mFramesReady);
// This may overflow, but client is not supposed to rely on it
mCblk->u.mStatic.mBufferPosition = (uint32_t) position;
}
@@ -800,9 +831,10 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
return (status_t) positionOrStatus;
}
size_t position = (size_t) positionOrStatus;
+ size_t end = mState.mLoopCount != 0 ? mState.mLoopEnd : mFrameCount;
size_t avail;
- if (position < mEnd) {
- avail = mEnd - position;
+ if (position < end) {
+ avail = end - position;
size_t wanted = buffer->mFrameCount;
if (avail < wanted) {
buffer->mFrameCount = avail;
@@ -815,7 +847,10 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
buffer->mFrameCount = 0;
buffer->mRaw = NULL;
}
- buffer->mNonContig = 0; // FIXME should be > 0 for looping
+ // As mFramesReady is the total remaining frames in the static audio track,
+ // it is always larger or equal to avail.
+ LOG_ALWAYS_FATAL_IF(mFramesReady < avail);
+ buffer->mNonContig = mFramesReady == INT64_MAX ? SIZE_MAX : clampToSize(mFramesReady - avail);
mUnreleased = avail;
return NO_ERROR;
}
@@ -823,6 +858,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
{
size_t stepCount = buffer->mFrameCount;
+ LOG_ALWAYS_FATAL_IF(!(stepCount <= mFramesReady));
LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased));
if (stepCount == 0) {
// prevent accidental re-use of buffer
@@ -839,11 +875,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, mFrameCount);
newPosition = mFrameCount;
} else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) {
+ newPosition = mState.mLoopStart;
if (mState.mLoopCount == -1 || --mState.mLoopCount != 0) {
- newPosition = mState.mLoopStart;
setFlags = CBLK_LOOP_CYCLE;
} else {
- mEnd = mFrameCount; // this is what allows playback to continue after the loop
setFlags = CBLK_LOOP_FINAL;
}
}
@@ -851,6 +886,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
setFlags |= CBLK_BUFFER_END;
}
mPosition = newPosition;
+ if (mFramesReady != INT64_MAX) {
+ mFramesReady -= stepCount;
+ }
+ mFramesReadySafe = clampToSize(mFramesReady);
cblk->mServer += stepCount;
// This may overflow, but client is not supposed to rely on it
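
Note: the AudioTrackShared.cpp changes above replace the simple mFlush increment with a packed flush word. The low bits (mask = (mFrameCountP2 << 1) - 1) carry the client's rear index at flush time, and the high bits carry a per-flush generation count incremented by mFrameCountP2 << 1, so the server can both detect a new flush and reconstruct the front index to jump to, falling back to a full flush if the value looks corrupt. A standalone sketch of the encoding, assuming frameCountP2 is a power of two no larger than 2^30 as the comment in flush() states; FlushWord and its methods are illustrative names, not AOSP API:

    #include <cstdint>
    #include <cstdio>

    struct FlushWord {
        size_t frameCountP2;   // power-of-two frame count of the shared buffer

        // Client side (AudioTrackClientProxy::flush() analogue).
        int32_t encode(int32_t previousFlush, int32_t rear) const {
            size_t increment = frameCountP2 << 1;
            size_t mask = increment - 1;
            return (rear & mask) | ((previousFlush & ~mask) + increment);
        }

        // Server side: rebuild the new front index from the unwrapped high bits
        // of the current front and the low bits of the flush word.
        int32_t newFront(int32_t front, int32_t flush) const {
            size_t mask = (frameCountP2 << 1) - 1;
            return (front & ~mask) | (flush & mask);
        }
    };

    int main() {
        FlushWord fw{1024};
        int32_t flush = 0;
        int32_t rear = 3000, front = 2800;
        flush = fw.encode(flush, rear);          // client flushes
        int32_t nf = fw.newFront(front, flush);  // server catches up to the rear
        std::printf("flush=%#x newFront=%#x\n", (unsigned)flush, (unsigned)nf);
        // The server additionally checks 0 <= rear - newFront <= frameCount and
        // treats anything else as a full flush (newFront = rear).
        return 0;
    }
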
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 99fd525..dbc7a9e 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -23,6 +23,7 @@
#include <binder/Parcel.h>
+#include <media/AudioEffect.h>
#include <media/IAudioPolicyService.h>
#include <system/audio.h>
@@ -40,7 +41,7 @@ enum {
START_OUTPUT,
STOP_OUTPUT,
RELEASE_OUTPUT,
- GET_INPUT,
+ GET_INPUT_FOR_ATTR,
START_INPUT,
STOP_INPUT,
RELEASE_INPUT,
@@ -68,7 +69,8 @@ enum {
GET_OUTPUT_FOR_ATTR,
ACQUIRE_SOUNDTRIGGER_SESSION,
RELEASE_SOUNDTRIGGER_SESSION,
- GET_PHONE_STATE
+ GET_PHONE_STATE,
+ REGISTER_POLICY_MIXES,
};
class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -159,21 +161,45 @@ public:
return static_cast <audio_io_handle_t> (reply.readInt32());
}
- virtual audio_io_handle_t getOutputForAttr(
- const audio_attributes_t *attr,
- uint32_t samplingRate,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo)
+ virtual status_t getOutputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *output,
+ audio_session_t session,
+ audio_stream_type_t *stream,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ const audio_offload_info_t *offloadInfo)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
if (attr == NULL) {
- ALOGE("Writing NULL audio attributes - shouldn't happen");
- return (audio_io_handle_t) 0;
+ if (stream == NULL) {
+ ALOGE("getOutputForAttr(): NULL audio attributes and stream type");
+ return BAD_VALUE;
+ }
+ if (*stream == AUDIO_STREAM_DEFAULT) {
+ ALOGE("getOutputForAttr unspecified stream type");
+ return BAD_VALUE;
+ }
+ }
+ if (output == NULL) {
+ ALOGE("getOutputForAttr NULL output - shouldn't happen");
+ return BAD_VALUE;
+ }
+ if (attr == NULL) {
+ data.writeInt32(0);
+ } else {
+ data.writeInt32(1);
+ data.write(attr, sizeof(audio_attributes_t));
+ }
+ data.writeInt32(session);
+ if (stream == NULL) {
+ data.writeInt32(0);
+ } else {
+ data.writeInt32(1);
+ data.writeInt32(*stream);
}
- data.write(attr, sizeof(audio_attributes_t));
data.writeInt32(samplingRate);
data.writeInt32(static_cast <uint32_t>(format));
data.writeInt32(channelMask);
@@ -185,62 +211,93 @@ public:
data.writeInt32(1);
data.write(offloadInfo, sizeof(audio_offload_info_t));
}
- remote()->transact(GET_OUTPUT_FOR_ATTR, data, &reply);
- return static_cast <audio_io_handle_t> (reply.readInt32());
+ status_t status = remote()->transact(GET_OUTPUT_FOR_ATTR, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = (status_t)reply.readInt32();
+ if (status != NO_ERROR) {
+ return status;
+ }
+ *output = (audio_io_handle_t)reply.readInt32();
+ if (stream != NULL) {
+ *stream = (audio_stream_type_t)reply.readInt32();
+ }
+ return status;
}
virtual status_t startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
data.writeInt32((int32_t) stream);
- data.writeInt32(session);
+ data.writeInt32((int32_t)session);
remote()->transact(START_OUTPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
virtual status_t stopOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
data.writeInt32((int32_t) stream);
- data.writeInt32(session);
+ data.writeInt32((int32_t)session);
remote()->transact(STOP_OUTPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
- virtual void releaseOutput(audio_io_handle_t output)
+ virtual void releaseOutput(audio_io_handle_t output,
+ audio_stream_type_t stream,
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
+ data.writeInt32((int32_t)stream);
+ data.writeInt32((int32_t)session);
remote()->transact(RELEASE_OUTPUT, data, &reply);
}
- virtual audio_io_handle_t getInput(
- audio_source_t inputSource,
- uint32_t samplingRate,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- int audioSession,
- audio_input_flags_t flags)
+ virtual status_t getInputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *input,
+ audio_session_t session,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_input_flags_t flags)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeInt32((int32_t) inputSource);
+ if (attr == NULL) {
+ ALOGE("getInputForAttr NULL attr - shouldn't happen");
+ return BAD_VALUE;
+ }
+ if (input == NULL) {
+ ALOGE("getInputForAttr NULL input - shouldn't happen");
+ return BAD_VALUE;
+ }
+ data.write(attr, sizeof(audio_attributes_t));
+ data.writeInt32(session);
data.writeInt32(samplingRate);
data.writeInt32(static_cast <uint32_t>(format));
data.writeInt32(channelMask);
- data.writeInt32(audioSession);
data.writeInt32(flags);
- remote()->transact(GET_INPUT, data, &reply);
- return static_cast <audio_io_handle_t> (reply.readInt32());
+ status_t status = remote()->transact(GET_INPUT_FOR_ATTR, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = reply.readInt32();
+ if (status != NO_ERROR) {
+ return status;
+ }
+ *input = (audio_io_handle_t)reply.readInt32();
+ return NO_ERROR;
}
virtual status_t startInput(audio_io_handle_t input,
@@ -619,6 +676,38 @@ public:
}
return (audio_mode_t)reply.readInt32();
}
+
+ virtual status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(registration ? 1 : 0);
+ size_t size = mixes.size();
+ if (size > MAX_MIXES_PER_POLICY) {
+ size = MAX_MIXES_PER_POLICY;
+ }
+ size_t sizePosition = data.dataPosition();
+ data.writeInt32(size);
+ size_t finalSize = size;
+ for (size_t i = 0; i < size; i++) {
+ size_t position = data.dataPosition();
+ if (mixes[i].writeToParcel(&data) != NO_ERROR) {
+ data.setDataPosition(position);
+ finalSize--;
+ }
+ }
+ if (size != finalSize) {
+ size_t position = data.dataPosition();
+ data.setDataPosition(sizePosition);
+ data.writeInt32(finalSize);
+ data.setDataPosition(position);
+ }
+ status_t status = remote()->transact(REGISTER_POLICY_MIXES, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
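The registerPolicyMixes() proxy above uses a back-patching idiom: it reserves the element-count slot in the Parcel, writes each AudioMix, and rewinds to correct the count if any element fails to serialize. A minimal stand-alone sketch of that idiom, assuming only the public android::Parcel API; Item and writeItems() are illustrative names, not from this patch.

    // Stand-alone sketch of the count back-patching idiom, assuming libbinder.
    #include <binder/Parcel.h>
    #include <utils/Vector.h>

    using android::NO_ERROR;
    using android::Parcel;
    using android::Vector;
    using android::status_t;

    struct Item {                                   // illustrative payload type
        int32_t value;
        status_t writeToParcel(Parcel *parcel) const {
            return parcel->writeInt32(value);
        }
    };

    static void writeItems(Parcel *data, const Vector<Item> &items) {
        size_t count = items.size();
        size_t countPos = data->dataPosition();     // remember where the count lives
        data->writeInt32((int32_t)count);           // optimistic count
        size_t written = count;
        for (size_t i = 0; i < count; i++) {
            size_t itemPos = data->dataPosition();
            if (items[i].writeToParcel(data) != NO_ERROR) {
                data->setDataPosition(itemPos);     // drop the half-written element
                written--;
            }
        }
        if (written != count) {
            size_t endPos = data->dataPosition();
            data->setDataPosition(countPos);        // rewind, patch the real count
            data->writeInt32((int32_t)written);
            data->setDataPosition(endPos);          // restore the write cursor
        }
    }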
@@ -704,8 +793,17 @@ status_t BnAudioPolicyService::onTransact(
case GET_OUTPUT_FOR_ATTR: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_attributes_t *attr = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
- data.read(attr, sizeof(audio_attributes_t));
+ audio_attributes_t attr;
+ bool hasAttributes = data.readInt32() != 0;
+ if (hasAttributes) {
+ data.read(&attr, sizeof(audio_attributes_t));
+ }
+ audio_session_t session = (audio_session_t)data.readInt32();
+ audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+ bool hasStream = data.readInt32() != 0;
+ if (hasStream) {
+ stream = (audio_stream_type_t)data.readInt32();
+ }
uint32_t samplingRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
@@ -716,13 +814,14 @@ status_t BnAudioPolicyService::onTransact(
if (hasOffloadInfo) {
data.read(&offloadInfo, sizeof(audio_offload_info_t));
}
- audio_io_handle_t output = getOutputForAttr(attr,
- samplingRate,
- format,
- channelMask,
- flags,
- hasOffloadInfo ? &offloadInfo : NULL);
- reply->writeInt32(static_cast <int>(output));
+ audio_io_handle_t output;
+ status_t status = getOutputForAttr(hasAttributes ? &attr : NULL,
+ &output, session, &stream,
+ samplingRate, format, channelMask,
+ flags, hasOffloadInfo ? &offloadInfo : NULL);
+ reply->writeInt32(status);
+ reply->writeInt32(output);
+ reply->writeInt32(stream);
return NO_ERROR;
} break;
@@ -731,7 +830,7 @@ status_t BnAudioPolicyService::onTransact(
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
audio_stream_type_t stream =
static_cast <audio_stream_type_t>(data.readInt32());
- int session = data.readInt32();
+ audio_session_t session = (audio_session_t)data.readInt32();
reply->writeInt32(static_cast <uint32_t>(startOutput(output,
stream,
session)));
@@ -743,7 +842,7 @@ status_t BnAudioPolicyService::onTransact(
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
audio_stream_type_t stream =
static_cast <audio_stream_type_t>(data.readInt32());
- int session = data.readInt32();
+ audio_session_t session = (audio_session_t)data.readInt32();
reply->writeInt32(static_cast <uint32_t>(stopOutput(output,
stream,
session)));
@@ -753,25 +852,29 @@ status_t BnAudioPolicyService::onTransact(
case RELEASE_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
- releaseOutput(output);
+ audio_stream_type_t stream = (audio_stream_type_t)data.readInt32();
+ audio_session_t session = (audio_session_t)data.readInt32();
+ releaseOutput(output, stream, session);
return NO_ERROR;
} break;
- case GET_INPUT: {
+ case GET_INPUT_FOR_ATTR: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_source_t inputSource = (audio_source_t) data.readInt32();
+ audio_attributes_t attr;
+ data.read(&attr, sizeof(audio_attributes_t));
+ audio_session_t session = (audio_session_t)data.readInt32();
uint32_t samplingRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
- int audioSession = data.readInt32();
audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
- audio_io_handle_t input = getInput(inputSource,
- samplingRate,
- format,
- channelMask,
- audioSession,
- flags);
- reply->writeInt32(static_cast <int>(input));
+ audio_io_handle_t input;
+ status_t status = getInputForAttr(&attr, &input, session,
+ samplingRate, format, channelMask,
+ flags);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(input);
+ }
return NO_ERROR;
} break;
@@ -916,16 +1019,18 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
int audioSession = data.readInt32();
uint32_t count = data.readInt32();
+ if (count > AudioEffect::kMaxPreProcessing) {
+ count = AudioEffect::kMaxPreProcessing;
+ }
uint32_t retCount = count;
- effect_descriptor_t *descriptors =
- (effect_descriptor_t *)new char[count * sizeof(effect_descriptor_t)];
+ effect_descriptor_t *descriptors = new effect_descriptor_t[count];
status_t status = queryDefaultPreProcessing(audioSession, descriptors, &retCount);
reply->writeInt32(status);
if (status != NO_ERROR && status != NO_MEMORY) {
retCount = 0;
}
reply->writeInt32(retCount);
- if (retCount) {
+ if (retCount != 0) {
if (retCount < count) {
count = retCount;
}
@@ -1075,6 +1180,25 @@ status_t BnAudioPolicyService::onTransact(
return NO_ERROR;
} break;
+ case REGISTER_POLICY_MIXES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ bool registration = data.readInt32() == 1;
+ Vector<AudioMix> mixes;
+ size_t size = (size_t)data.readInt32();
+ if (size > MAX_MIXES_PER_POLICY) {
+ size = MAX_MIXES_PER_POLICY;
+ }
+ for (size_t i = 0; i < size; i++) {
+ AudioMix mix;
+ if (mix.readFromParcel((Parcel*)&data) == NO_ERROR) {
+ mixes.add(mix);
+ }
+ }
+ status_t status = registerPolicyMixes(mixes, registration);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
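On the receiving side, both REGISTER_POLICY_MIXES and QUERY_DEFAULT_PRE_PROCESSING now clamp the element count read from the Parcel before allocating or looping, so a misbehaving client cannot dictate the allocation size. A condensed sketch of that defensive-deserialization pattern, assuming a hypothetical kMaxItems/Item pair rather than the real AudioMix types.

    // Sketch of clamping a remote-controlled count before allocating.
    #include <binder/Parcel.h>
    #include <memory>

    using android::Parcel;

    struct Item { int32_t value; };                 // illustrative payload type
    static const uint32_t kMaxItems = 16;           // analogous to MAX_MIXES_PER_POLICY

    static std::unique_ptr<Item[]> readItems(const Parcel &data, uint32_t *outCount) {
        uint32_t count = (uint32_t)data.readInt32();  // value chosen by the peer
        if (count > kMaxItems) {
            count = kMaxItems;                        // never let the peer size the allocation
        }
        std::unique_ptr<Item[]> items(new Item[count]);
        for (uint32_t i = 0; i < count; i++) {
            items[i].value = data.readInt32();
        }
        *outCount = count;
        return items;
    }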
diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp
index 0d5f990..c26c5bf 100644
--- a/media/libmedia/ICrypto.cpp
+++ b/media/libmedia/ICrypto.cpp
@@ -33,6 +33,7 @@ enum {
DESTROY_PLUGIN,
REQUIRES_SECURE_COMPONENT,
DECRYPT,
+ NOTIFY_RESOLUTION,
};
struct BpCrypto : public BpInterface<ICrypto> {
@@ -149,6 +150,15 @@ struct BpCrypto : public BpInterface<ICrypto> {
return result;
}
+ virtual void notifyResolution(
+ uint32_t width, uint32_t height) {
+ Parcel data, reply;
+ data.writeInterfaceToken(ICrypto::getInterfaceDescriptor());
+ data.writeInt32(width);
+ data.writeInt32(height);
+ remote()->transact(NOTIFY_RESOLUTION, data, &reply);
+ }
+
private:
DISALLOW_EVIL_CONSTRUCTORS(BpCrypto);
};
@@ -290,10 +300,20 @@ status_t BnCrypto::onTransact(
return OK;
}
+ case NOTIFY_RESOLUTION:
+ {
+ CHECK_INTERFACE(ICrypto, data, reply);
+
+ int32_t width = data.readInt32();
+ int32_t height = data.readInt32();
+ notifyResolution(width, height);
+
+ return OK;
+ }
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
} // namespace android
-
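notifyResolution() is a fire-and-forget call: the proxy marshals two int32 values and does not inspect the reply. The MediaCodec change listed in the diffstat presumably forwards video size changes through it; the sketch below is only an illustration of such a caller, with the "width"/"height" format keys assumed.

    // Illustrative caller only; the real call site in MediaCodec.cpp is not shown here.
    #include <media/ICrypto.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static void maybeNotifyResolution(const sp<ICrypto> &crypto,
                                      const sp<AMessage> &outputFormat) {
        int32_t width, height;
        if (crypto != NULL
                && outputFormat->findInt32("width", &width)
                && outputFormat->findInt32("height", &height)) {
            // Lets secure plugins adapt their buffer handling to the new size.
            crypto->notifyResolution(width, height);
        }
    }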
diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp
index 0efdce8..b08fa82 100644
--- a/media/libmedia/IDrm.cpp
+++ b/media/libmedia/IDrm.cpp
@@ -54,7 +54,9 @@ enum {
SIGN_RSA,
VERIFY,
SET_LISTENER,
- UNPROVISION_DEVICE
+ UNPROVISION_DEVICE,
+ GET_SECURE_STOP,
+ RELEASE_ALL_SECURE_STOPS
};
struct BpDrm : public BpInterface<IDrm> {
@@ -255,6 +257,17 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ writeVector(data, ssid);
+ remote()->transact(GET_SECURE_STOP, data, &reply);
+
+ readVector(reply, secureStop);
+ return reply.readInt32();
+ }
+
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -265,6 +278,15 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t releaseAllSecureStops() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ remote()->transact(RELEASE_ALL_SECURE_STOPS, data, &reply);
+
+ return reply.readInt32();
+ }
+
virtual status_t getPropertyString(String8 const &name, String8 &value) const {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -655,6 +677,17 @@ status_t BnDrm::onTransact(
return OK;
}
+ case GET_SECURE_STOP:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ Vector<uint8_t> ssid, secureStop;
+ readVector(data, ssid);
+ status_t result = getSecureStop(ssid, secureStop);
+ writeVector(reply, secureStop);
+ reply->writeInt32(result);
+ return OK;
+ }
+
case RELEASE_SECURE_STOPS:
{
CHECK_INTERFACE(IDrm, data, reply);
@@ -664,6 +697,13 @@ status_t BnDrm::onTransact(
return OK;
}
+ case RELEASE_ALL_SECURE_STOPS:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ reply->writeInt32(releaseAllSecureStops());
+ return OK;
+ }
+
case GET_PROPERTY_STRING:
{
CHECK_INTERFACE(IDrm, data, reply);
@@ -809,4 +849,3 @@ status_t BnDrm::onTransact(
}
} // namespace android
-
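getSecureStop() and releaseAllSecureStops() reuse the existing writeVector()/readVector() helpers, whose bodies are outside this hunk. They are assumed to use the usual length-prefixed byte-blob encoding, roughly:

    // Assumed shape of the helpers (the real definitions live elsewhere in IDrm.cpp).
    #include <binder/Parcel.h>
    #include <utils/Vector.h>

    using android::Parcel;
    using android::Vector;

    static void writeVector(Parcel &data, Vector<uint8_t> const &vector) {
        data.writeInt32(vector.size());             // length prefix ...
        data.write(vector.array(), vector.size());  // ... followed by the raw bytes
    }

    static void readVector(const Parcel &reply, Vector<uint8_t> &vector) {
        uint32_t size = reply.readInt32();
        vector.insertAt((size_t)0, size);           // grow to the announced size
        reply.read(vector.editArray(), size);
    }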
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index c146b8d..f91e3e4 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -52,6 +52,13 @@ Visualizer::Visualizer (int32_t priority,
Visualizer::~Visualizer()
{
+ ALOGV("Visualizer::~Visualizer()");
+ if (mCaptureThread != NULL) {
+ mCaptureThread->requestExitAndWait();
+ mCaptureThread.clear();
+ }
+ mCaptureCallBack = NULL;
+ mCaptureFlags = 0;
}
status_t Visualizer::setEnabled(bool enabled)
@@ -102,20 +109,18 @@ status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t
return INVALID_OPERATION;
}
- sp<CaptureThread> t = mCaptureThread;
- if (t != 0) {
- t->mLock.lock();
+ if (mCaptureThread != 0) {
+ mCaptureLock.unlock();
+ mCaptureThread->requestExitAndWait();
+ mCaptureLock.lock();
}
+
mCaptureThread.clear();
mCaptureCallBack = cbk;
mCaptureCbkUser = user;
mCaptureFlags = flags;
mCaptureRate = rate;
- if (t != 0) {
- t->mLock.unlock();
- }
-
if (cbk != NULL) {
mCaptureThread = new CaptureThread(*this, rate, ((flags & CAPTURE_CALL_JAVA) != 0));
}
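The setCaptureCallBack() change avoids a deadlock: instead of grabbing the capture thread's own lock, it drops mCaptureLock, joins the thread with requestExitAndWait(), and re-takes the lock before swapping the callback state. A condensed sketch of that drop-join-retake pattern; Worker and stopThreadLocked() are illustrative names.

    // Drop-join-retake sketch; mLock is held by the caller on entry and on return.
    #include <utils/Mutex.h>
    #include <utils/Thread.h>

    using namespace android;

    struct Worker {
        Mutex mLock;
        sp<Thread> mThread;

        void stopThreadLocked() {                   // illustrative helper name
            if (mThread != 0) {
                mLock.unlock();                     // the thread may be blocked on mLock
                mThread->requestExitAndWait();      // safe to join now
                mLock.lock();
            }
            mThread.clear();
        }
    };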
diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp
index 62593b2..8ee7c0b 100644
--- a/media/libmediaplayerservice/Crypto.cpp
+++ b/media/libmediaplayerservice/Crypto.cpp
@@ -257,4 +257,12 @@ ssize_t Crypto::decrypt(
errorDetailMsg);
}
+void Crypto::notifyResolution(uint32_t width, uint32_t height) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck == OK && mPlugin != NULL) {
+ mPlugin->notifyResolution(width, height);
+ }
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/Crypto.h b/media/libmediaplayerservice/Crypto.h
index c44ae34..0037c2e 100644
--- a/media/libmediaplayerservice/Crypto.h
+++ b/media/libmediaplayerservice/Crypto.h
@@ -45,6 +45,8 @@ struct Crypto : public BnCrypto {
virtual bool requiresSecureDecoderComponent(
const char *mime) const;
+ virtual void notifyResolution(uint32_t width, uint32_t height);
+
virtual ssize_t decrypt(
bool secure,
const uint8_t key[16],
diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp
index 89d689d..73f1a2a 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/media/libmediaplayerservice/Drm.cpp
@@ -449,6 +449,20 @@ status_t Drm::getSecureStops(List<Vector<uint8_t> > &secureStops) {
return mPlugin->getSecureStops(secureStops);
}
+status_t Drm::getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ return mPlugin->getSecureStop(ssid, secureStop);
+}
+
status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
Mutex::Autolock autoLock(mLock);
@@ -463,6 +477,20 @@ status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
return mPlugin->releaseSecureStops(ssRelease);
}
+status_t Drm::releaseAllSecureStops() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ return mPlugin->releaseAllSecureStops();
+}
+
status_t Drm::getPropertyString(String8 const &name, String8 &value ) const {
Mutex::Autolock autoLock(mLock);
@@ -646,10 +674,14 @@ status_t Drm::signRSA(Vector<uint8_t> const &sessionId,
void Drm::binderDied(const wp<IBinder> &the_late_who)
{
+ mEventLock.lock();
+ mListener.clear();
+ mEventLock.unlock();
+
+ Mutex::Autolock autoLock(mLock);
delete mPlugin;
mPlugin = NULL;
closeFactory();
- mListener.clear();
}
} // namespace android
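binderDied() now detaches the listener under mEventLock before taking mLock to destroy the plugin, so a callback racing on the event lock can no longer deadlock against plugin teardown. A generic sketch of that two-lock ordering, with Listener and Plugin as stand-ins for the real types.

    // Two-lock teardown sketch; Listener and Plugin stand in for the real types.
    #include <utils/Mutex.h>
    #include <utils/RefBase.h>

    using namespace android;

    struct Listener : public RefBase {};
    struct Plugin {};

    struct Service {
        Mutex mEventLock;           // guards mListener (also taken on callback paths)
        Mutex mLock;                // guards mPlugin
        sp<Listener> mListener;
        Plugin *mPlugin = NULL;

        void onClientDied() {
            mEventLock.lock();
            mListener.clear();      // no callbacks can be dispatched past this point
            mEventLock.unlock();

            Mutex::Autolock autoLock(mLock);
            delete mPlugin;         // safe: listener callbacks can no longer run
            mPlugin = NULL;
        }
    };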
diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h
index 9e23e2e..0e1eb2c 100644
--- a/media/libmediaplayerservice/Drm.h
+++ b/media/libmediaplayerservice/Drm.h
@@ -78,8 +78,10 @@ struct Drm : public BnDrm,
virtual status_t unprovisionDevice();
virtual status_t getSecureStops(List<Vector<uint8_t> > &secureStops);
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
+ virtual status_t releaseAllSecureStops();
virtual status_t getPropertyString(String8 const &name, String8 &value ) const;
virtual status_t getPropertyByteArray(String8 const &name,
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index 3e0fc0d..aeefb4c 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -20,9 +20,12 @@
#include <cutils/properties.h>
#include <media/IMediaPlayer.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
#include <media/stagefright/foundation/ADebug.h>
#include <utils/Errors.h>
#include <utils/misc.h>
+#include <../libstagefright/include/WVMExtractor.h>
#include "MediaPlayerFactory.h"
@@ -179,10 +182,18 @@ class StagefrightPlayerFactory :
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
int fd,
int64_t offset,
- int64_t /*length*/,
+ int64_t length,
float /*curScore*/) {
- if (getDefaultPlayerType()
- == STAGEFRIGHT_PLAYER) {
+ if (legacyDrm()) {
+ sp<DataSource> source = new FileSource(dup(fd), offset, length);
+ String8 mimeType;
+ float confidence;
+ if (SniffWVM(source, &mimeType, &confidence, NULL /* format */)) {
+ return 1.0;
+ }
+ }
+
+ if (getDefaultPlayerType() == STAGEFRIGHT_PLAYER) {
char buf[20];
lseek(fd, offset, SEEK_SET);
read(fd, buf, sizeof(buf));
@@ -198,10 +209,28 @@ class StagefrightPlayerFactory :
return 0.0;
}
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ const char* url,
+ float /*curScore*/) {
+ if (legacyDrm() && !strncasecmp("widevine://", url, 11)) {
+ return 1.0;
+ }
+ return 0.0;
+ }
+
virtual sp<MediaPlayerBase> createPlayer() {
ALOGV(" create StagefrightPlayer");
return new StagefrightPlayer();
}
+ private:
+ bool legacyDrm() {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.media.legacy-drm", value, NULL)
+ && (!strcmp("1", value) || !strcasecmp("true", value))) {
+ return true;
+ }
+ return false;
+ }
};
class NuPlayerFactory : public MediaPlayerFactory::IFactory {
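The legacy widevine:// and WVM routing above is gated on the persist.sys.media.legacy-drm system property. The same boolean property gate in isolation, as a sketch (isPropertyTrue() is an illustrative helper; setprop is the usual way to flip it during testing):

    // Boolean system-property gate, extracted for clarity.
    #include <cutils/properties.h>
    #include <string.h>
    #include <strings.h>

    static bool isPropertyTrue(const char *name) {  // illustrative helper
        char value[PROPERTY_VALUE_MAX];
        if (property_get(name, value, NULL) <= 0) {
            return false;                           // property not set
        }
        return !strcmp("1", value) || !strcasecmp("true", value);
    }

    // e.g. isPropertyTrue("persist.sys.media.legacy-drm"), toggled for testing with:
    //   adb shell setprop persist.sys.media.legacy-drm 1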
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 7bb154e..071b894 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -2124,6 +2124,7 @@ ssize_t MediaPlayerService::AudioCache::write(const void* buffer, size_t size)
// immutable with respect to future writes.
//
// It is thus safe for another thread to read the AudioCache.
+ Mutex::Autolock lock(mLock);
mCommandComplete = true;
mSignal.signal();
}
@@ -2158,7 +2159,6 @@ void MediaPlayerService::AudioCache::notify(
{
case MEDIA_ERROR:
ALOGE("Error %d, %d occurred", ext1, ext2);
- p->mError = ext1;
break;
case MEDIA_PREPARED:
ALOGV("prepared");
@@ -2173,6 +2173,9 @@ void MediaPlayerService::AudioCache::notify(
// wake up thread
Mutex::Autolock lock(p->mLock);
+ if (msg == MEDIA_ERROR) {
+ p->mError = ext1;
+ }
p->mCommandComplete = true;
p->mSignal.signal();
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index de25291..86639cb 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -111,7 +111,7 @@ sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const
status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
ALOGV("setAudioSource: %d", as);
if (as < AUDIO_SOURCE_DEFAULT ||
- as >= AUDIO_SOURCE_CNT) {
+ (as >= AUDIO_SOURCE_CNT && as != AUDIO_SOURCE_FM_TUNER)) {
ALOGE("Invalid audio source: %d", as);
return BAD_VALUE;
}
@@ -984,7 +984,7 @@ status_t StagefrightRecorder::setupAMRRecording() {
}
status_t StagefrightRecorder::setupRawAudioRecording() {
- if (mAudioSource >= AUDIO_SOURCE_CNT) {
+ if (mAudioSource >= AUDIO_SOURCE_CNT && mAudioSource != AUDIO_SOURCE_FM_TUNER) {
ALOGE("Invalid audio source: %d", mAudioSource);
return BAD_VALUE;
}
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index 676c0a6..6609874 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -5,7 +5,9 @@ LOCAL_SRC_FILES:= \
GenericSource.cpp \
HTTPLiveSource.cpp \
NuPlayer.cpp \
+ NuPlayerCCDecoder.cpp \
NuPlayerDecoder.cpp \
+ NuPlayerDecoderBase.cpp \
NuPlayerDecoderPassThrough.cpp \
NuPlayerDriver.cpp \
NuPlayerRenderer.cpp \
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 6859a1a..1af2713 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -45,13 +45,19 @@ NuPlayer::GenericSource::GenericSource(
bool uidValid,
uid_t uid)
: Source(notify),
+ mAudioTimeUs(0),
+ mAudioLastDequeueTimeUs(0),
+ mVideoTimeUs(0),
+ mVideoLastDequeueTimeUs(0),
mFetchSubtitleDataGeneration(0),
mFetchTimedTextDataGeneration(0),
mDurationUs(0ll),
mAudioIsVorbis(false),
mIsWidevine(false),
+ mIsSecure(false),
mUIDValid(uidValid),
mUID(uid),
+ mFd(-1),
mDrmManagerClient(NULL),
mMetaDataSize(-1ll),
mBitrate(-1ll),
@@ -62,13 +68,14 @@ NuPlayer::GenericSource::GenericSource(
}
void NuPlayer::GenericSource::resetDataSource() {
- mAudioTimeUs = 0;
- mVideoTimeUs = 0;
mHTTPService.clear();
mHttpSource.clear();
mUri.clear();
mUriHeaders.clear();
- mFd = -1;
+ if (mFd >= 0) {
+ close(mFd);
+ mFd = -1;
+ }
mOffset = 0;
mLength = 0;
setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
@@ -157,6 +164,17 @@ status_t NuPlayer::GenericSource::initFromDataSource() {
if (mFileMeta->findInt64(kKeyDuration, &duration)) {
mDurationUs = duration;
}
+
+ if (!mIsWidevine) {
+ // Check mime to see if we actually have a widevine source.
+ // If the data source is not URL-type (eg. file source), we
+ // won't be able to tell until now.
+ const char *fileMime;
+ if (mFileMeta->findCString(kKeyMIMEType, &fileMime)
+ && !strncasecmp(fileMime, "video/wvm", 9)) {
+ mIsWidevine = true;
+ }
+ }
}
int32_t totalBitrate = 0;
@@ -202,7 +220,7 @@ status_t NuPlayer::GenericSource::initFromDataSource() {
int32_t secure;
if (meta->findInt32(kKeyRequiresSecureBuffers, &secure)
&& secure) {
- mIsWidevine = true;
+ mIsSecure = true;
if (mUIDValid) {
extractor->setUID(mUID);
}
@@ -257,7 +275,7 @@ int64_t NuPlayer::GenericSource::getLastReadPosition() {
status_t NuPlayer::GenericSource::setBuffers(
bool audio, Vector<MediaBuffer *> &buffers) {
- if (mIsWidevine && !audio) {
+ if (mIsSecure && !audio) {
return mVideoTrack.mSource->setBuffers(buffers);
}
return INVALID_OPERATION;
@@ -268,6 +286,7 @@ NuPlayer::GenericSource::~GenericSource() {
mLooper->unregisterHandler(id());
mLooper->stop();
}
+ resetDataSource();
}
void NuPlayer::GenericSource::prepareAsync() {
@@ -286,6 +305,10 @@ void NuPlayer::GenericSource::prepareAsync() {
void NuPlayer::GenericSource::onPrepareAsync() {
// delayed data source creation
if (mDataSource == NULL) {
+ // set to false first; if the extractor
+ // comes back as secure, flip it to true.
+ mIsSecure = false;
+
if (!mUri.empty()) {
const char* uri = mUri.c_str();
mIsWidevine = !strncasecmp(uri, "widevine://", 11);
@@ -305,11 +328,10 @@ void NuPlayer::GenericSource::onPrepareAsync() {
mHTTPService, uri, &mUriHeaders, &mContentType,
static_cast<HTTPBase *>(mHttpSource.get()));
} else {
- // set to false first, if the extractor
- // comes back as secure, set it to true then.
mIsWidevine = false;
mDataSource = new FileSource(mFd, mOffset, mLength);
+ mFd = -1;
}
if (mDataSource == NULL) {
@@ -360,7 +382,7 @@ void NuPlayer::GenericSource::onPrepareAsync() {
}
notifyFlagsChanged(
- (mIsWidevine ? FLAG_SECURE : 0)
+ (mIsSecure ? FLAG_SECURE : 0)
| FLAG_CAN_PAUSE
| FLAG_CAN_SEEK_BACKWARD
| FLAG_CAN_SEEK_FORWARD
@@ -477,8 +499,8 @@ void NuPlayer::GenericSource::stop() {
// nothing to do, just account for DRM playback status
setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
mStarted = false;
- if (mIsWidevine) {
- // For a widevine source we need to prevent any further reads.
+ if (mIsWidevine || mIsSecure) {
+ // For widevine or secure sources we need to prevent any further reads.
sp<AMessage> msg = new AMessage(kWhatStopWidevine, id());
sp<AMessage> response;
(void) msg->postAndAwaitResponse(&response);
@@ -644,17 +666,13 @@ void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
track->mSource->start();
track->mIndex = trackIndex;
- status_t avail;
- if (!track->mPackets->hasBufferAvailable(&avail)) {
- // sync from other source
- TRESPASS();
- break;
- }
-
int64_t timeUs, actualTimeUs;
const bool formatChange = true;
- sp<AMessage> latestMeta = track->mPackets->getLatestEnqueuedMeta();
- CHECK(latestMeta != NULL && latestMeta->findInt64("timeUs", &timeUs));
+ if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ timeUs = mAudioLastDequeueTimeUs;
+ } else {
+ timeUs = mVideoLastDequeueTimeUs;
+ }
readBuffer(trackType, timeUs, &actualTimeUs, formatChange);
readBuffer(counterpartType, -1, NULL, formatChange);
ALOGV("timeUs %lld actualTimeUs %lld", timeUs, actualTimeUs);
@@ -842,7 +860,12 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(
status_t finalResult;
if (!track->mPackets->hasBufferAvailable(&finalResult)) {
- return (finalResult == OK ? -EWOULDBLOCK : finalResult);
+ if (finalResult == OK) {
+ postReadBuffer(
+ audio ? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+ return -EWOULDBLOCK;
+ }
+ return finalResult;
}
status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
@@ -866,6 +889,11 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(
int64_t timeUs;
status_t eosResult; // ignored
CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+ if (audio) {
+ mAudioLastDequeueTimeUs = timeUs;
+ } else {
+ mVideoLastDequeueTimeUs = timeUs;
+ }
if (mSubtitleTrack.mSource != NULL
&& !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
@@ -996,11 +1024,12 @@ ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const
return -1;
}
-status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select) {
+status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
msg->setInt32("trackIndex", trackIndex);
msg->setInt32("select", select);
+ msg->setInt64("timeUs", timeUs);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
@@ -1013,11 +1042,13 @@ status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select) {
void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) {
int32_t trackIndex, select;
+ int64_t timeUs;
CHECK(msg->findInt32("trackIndex", &trackIndex));
CHECK(msg->findInt32("select", &select));
+ CHECK(msg->findInt64("timeUs", &timeUs));
sp<AMessage> response = new AMessage;
- status_t err = doSelectTrack(trackIndex, select);
+ status_t err = doSelectTrack(trackIndex, select, timeUs);
response->setInt32("err", err);
uint32_t replyID;
@@ -1025,7 +1056,7 @@ void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) {
response->postReply(replyID);
}
-status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select) {
+status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, int64_t timeUs) {
if (trackIndex >= mSources.size()) {
return BAD_INDEX;
}
@@ -1078,6 +1109,23 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select)
mFetchTimedTextDataGeneration++;
}
+ status_t eosResult; // ignored
+ if (mSubtitleTrack.mSource != NULL
+ && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchSubtitleDataGeneration);
+ msg->post();
+ }
+
+ if (mTimedTextTrack.mSource != NULL
+ && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchTimedTextDataGeneration);
+ msg->post();
+ }
+
return OK;
} else if (!strncasecmp(mime, "audio/", 6) || !strncasecmp(mime, "video/", 6)) {
bool audio = !strncasecmp(mime, "audio/", 6);
@@ -1132,10 +1180,12 @@ status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, &actualTimeUs);
seekTimeUs = actualTimeUs;
+ mVideoLastDequeueTimeUs = seekTimeUs;
}
if (mAudioTrack.mSource != NULL) {
readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs);
+ mAudioLastDequeueTimeUs = seekTimeUs;
}
setDrmPlaybackStatusIfNeeded(Playback::START, seekTimeUs / 1000);
@@ -1148,6 +1198,7 @@ status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
MediaBuffer* mb,
media_track_type trackType,
+ int64_t /* seekTimeUs */,
int64_t *actualTimeUs) {
bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
size_t outLength = mb->range_length();
@@ -1157,7 +1208,7 @@ sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
}
sp<ABuffer> ab;
- if (mIsWidevine && !audio) {
+ if (mIsSecure && !audio) {
// data is already provided in the buffer
ab = new ABuffer(NULL, mb->range_length());
mb->add_ref();
@@ -1185,6 +1236,16 @@ sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
meta->setInt64("timeUs", timeUs);
+#if 0
+ // Temporarily disable pre-roll till we have a full solution to handle
+ // both single seek and continuous seek gracefully.
+ if (seekTimeUs > timeUs) {
+ sp<AMessage> extra = new AMessage;
+ extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
+ meta->setMessage("extra", extra);
+ }
+#endif
+
if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
const char *mime;
CHECK(mTimedTextTrack.mSource != NULL
@@ -1226,14 +1287,13 @@ void NuPlayer::GenericSource::onReadBuffer(sp<AMessage> msg) {
int32_t tmpType;
CHECK(msg->findInt32("trackType", &tmpType));
media_track_type trackType = (media_track_type)tmpType;
+ readBuffer(trackType);
{
// only protect the variable change, as readBuffer may
- // take considerable time. This may result in one extra
- // read being processed, but that is benign.
+ // take considerable time.
Mutex::Autolock _l(mReadBufferLock);
mPendingReadBufferTypes &= ~(1 << trackType);
}
- readBuffer(trackType);
}
void NuPlayer::GenericSource::readBuffer(
@@ -1286,7 +1346,7 @@ void NuPlayer::GenericSource::readBuffer(
seeking = true;
}
- if (mIsWidevine && trackType != MEDIA_TRACK_TYPE_AUDIO) {
+ if (mIsWidevine) {
options.setNonBlocking();
}
@@ -1311,15 +1371,14 @@ void NuPlayer::GenericSource::readBuffer(
if ((seeking || formatChange)
&& (trackType == MEDIA_TRACK_TYPE_AUDIO
|| trackType == MEDIA_TRACK_TYPE_VIDEO)) {
- ATSParser::DiscontinuityType type = formatChange
- ? (seeking
- ? ATSParser::DISCONTINUITY_FORMATCHANGE
- : ATSParser::DISCONTINUITY_NONE)
- : ATSParser::DISCONTINUITY_SEEK;
+ ATSParser::DiscontinuityType type = (formatChange && seeking)
+ ? ATSParser::DISCONTINUITY_FORMATCHANGE
+ : ATSParser::DISCONTINUITY_NONE;
track->mPackets->queueDiscontinuity( type, NULL, true /* discard */);
}
- sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType, actualTimeUs);
+ sp<ABuffer> buffer = mediaBufferToABuffer(
+ mbuf, trackType, seekTimeUs, actualTimeUs);
track->mPackets->queueAccessUnit(buffer);
formatChange = false;
seeking = false;
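The new mAudioLastDequeueTimeUs/mVideoLastDequeueTimeUs fields record the timestamp of the most recently dequeued access unit, giving track selection a resync point even when the packet queue is empty (the old code asserted via TRESPASS() in that case). A tiny sketch of the bookkeeping in isolation; TrackPosition is an illustrative wrapper, not a class in this patch.

    // Bookkeeping only; TrackPosition is not a class in this patch.
    #include <stdint.h>

    struct TrackPosition {
        int64_t mAudioLastDequeueTimeUs = 0;
        int64_t mVideoLastDequeueTimeUs = 0;

        void onDequeued(bool audio, int64_t timeUs) {
            if (audio) {
                mAudioLastDequeueTimeUs = timeUs;
            } else {
                mVideoLastDequeueTimeUs = timeUs;
            }
        }

        // Where a newly selected track should resume reading from.
        int64_t resyncTimeUs(bool audio) const {
            return audio ? mAudioLastDequeueTimeUs : mVideoLastDequeueTimeUs;
        }
    };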
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index f8601ea..1b63a1f 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -67,7 +67,7 @@ struct NuPlayer::GenericSource : public NuPlayer::Source {
virtual size_t getTrackCount() const;
virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
virtual ssize_t getSelectedTrack(media_track_type type) const;
- virtual status_t selectTrack(size_t trackIndex, bool select);
+ virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
virtual status_t seekTo(int64_t seekTimeUs);
virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
@@ -106,8 +106,10 @@ private:
Track mAudioTrack;
int64_t mAudioTimeUs;
+ int64_t mAudioLastDequeueTimeUs;
Track mVideoTrack;
int64_t mVideoTimeUs;
+ int64_t mVideoLastDequeueTimeUs;
Track mSubtitleTrack;
Track mTimedTextTrack;
@@ -116,6 +118,7 @@ private:
int64_t mDurationUs;
bool mAudioIsVorbis;
bool mIsWidevine;
+ bool mIsSecure;
bool mUIDValid;
uid_t mUID;
sp<IMediaHTTPService> mHTTPService;
@@ -162,7 +165,7 @@ private:
ssize_t doGetSelectedTrack(media_track_type type) const;
void onSelectTrack(sp<AMessage> msg);
- status_t doSelectTrack(size_t trackIndex, bool select);
+ status_t doSelectTrack(size_t trackIndex, bool select, int64_t timeUs);
void onSeek(sp<AMessage> msg);
status_t doSeek(int64_t seekTimeUs);
@@ -180,6 +183,7 @@ private:
sp<ABuffer> mediaBufferToABuffer(
MediaBuffer *mbuf,
media_track_type trackType,
+ int64_t seekTimeUs,
int64_t *actualTimeUs = NULL);
void postReadBuffer(media_track_type trackType);
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index a003c81..a26ef9e 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -98,6 +98,10 @@ void NuPlayer::HTTPLiveSource::start() {
}
sp<AMessage> NuPlayer::HTTPLiveSource::getFormat(bool audio) {
+ if (mLiveSession == NULL) {
+ return NULL;
+ }
+
sp<AMessage> format;
status_t err = mLiveSession->getStreamFormat(
audio ? LiveSession::STREAMTYPE_AUDIO
@@ -135,7 +139,15 @@ sp<AMessage> NuPlayer::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
return mLiveSession->getTrackInfo(trackIndex);
}
-status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) {
+ssize_t NuPlayer::HTTPLiveSource::getSelectedTrack(media_track_type type) const {
+ if (mLiveSession == NULL) {
+ return -1;
+ } else {
+ return mLiveSession->getSelectedTrack(type);
+ }
+}
+
+status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, int64_t /*timeUs*/) {
status_t err = mLiveSession->selectTrack(trackIndex, select);
if (err == OK) {
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index 6b5f6af..bbb8981 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -42,7 +42,8 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
virtual status_t getDuration(int64_t *durationUs);
virtual size_t getTrackCount() const;
virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
- virtual status_t selectTrack(size_t trackIndex, bool select);
+ virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
+ virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
virtual status_t seekTo(int64_t seekTimeUs);
protected:
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index a63a940..080cd52 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -21,7 +21,9 @@
#include "NuPlayer.h"
#include "HTTPLiveSource.h"
+#include "NuPlayerCCDecoder.h"
#include "NuPlayerDecoder.h"
+#include "NuPlayerDecoderBase.h"
#include "NuPlayerDecoderPassThrough.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
@@ -33,6 +35,8 @@
#include "ATSParser.h"
+#include <cutils/properties.h>
+
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -50,10 +54,6 @@
namespace android {
-// TODO optimize buffer size for power consumption
-// The offload read buffer size is 32 KB but 24 KB uses less power.
-const size_t NuPlayer::kAggregateBufferSizeBytes = 24 * 1024;
-
struct NuPlayer::Action : public RefBase {
Action() {}
@@ -80,6 +80,21 @@ private:
DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};
+struct NuPlayer::ResumeDecoderAction : public Action {
+ ResumeDecoderAction(bool needNotify)
+ : mNeedNotify(needNotify) {
+ }
+
+ virtual void execute(NuPlayer *player) {
+ player->performResumeDecoders(mNeedNotify);
+ }
+
+private:
+ bool mNeedNotify;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ResumeDecoderAction);
+};
+
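ResumeDecoderAction joins SeekAction, SetSurfaceAction and the renamed FlushDecoderAction as small command objects queued on mDeferredActions and drained by processDeferredActions() once flushing settles. A generic sketch of that deferred-action pattern, assuming simplified Player/Action types rather than the real NuPlayer classes:

    // Deferred-action pattern in miniature; Player and Action are simplified stand-ins.
    #include <list>

    struct Player;

    struct Action {
        virtual ~Action() {}
        virtual void execute(Player *player) = 0;   // e.g. performSeek, performResumeDecoders
    };

    struct Player {
        std::list<Action *> mDeferredActions;
        bool mFlushing = false;

        void defer(Action *action) { mDeferredActions.push_back(action); }

        void processDeferredActions() {
            while (!mDeferredActions.empty()) {
                if (mFlushing) {
                    break;                          // resumed when the flush completes
                }
                Action *a = mDeferredActions.front();
                mDeferredActions.pop_front();
                a->execute(this);
                delete a;
            }
        }
    };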
struct NuPlayer::SetSurfaceAction : public Action {
SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper)
: mWrapper(wrapper) {
@@ -95,21 +110,21 @@ private:
DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
};
-struct NuPlayer::ShutdownDecoderAction : public Action {
- ShutdownDecoderAction(bool audio, bool video)
+struct NuPlayer::FlushDecoderAction : public Action {
+ FlushDecoderAction(FlushCommand audio, FlushCommand video)
: mAudio(audio),
mVideo(video) {
}
virtual void execute(NuPlayer *player) {
- player->performDecoderShutdown(mAudio, mVideo);
+ player->performDecoderFlush(mAudio, mVideo);
}
private:
- bool mAudio;
- bool mVideo;
+ FlushCommand mAudio;
+ FlushCommand mVideo;
- DISALLOW_EVIL_CONSTRUCTORS(ShutdownDecoderAction);
+ DISALLOW_EVIL_CONSTRUCTORS(FlushDecoderAction);
};
struct NuPlayer::PostMessageAction : public Action {
@@ -151,7 +166,6 @@ private:
NuPlayer::NuPlayer()
: mUIDValid(false),
mSourceFlags(0),
- mVideoIsAVC(false),
mOffloadAudio(false),
mAudioDecoderGeneration(0),
mVideoDecoderGeneration(0),
@@ -162,13 +176,9 @@ NuPlayer::NuPlayer()
mScanSourcesGeneration(0),
mPollDurationGeneration(0),
mTimedTextGeneration(0),
- mTimeDiscontinuityPending(false),
mFlushingAudio(NONE),
mFlushingVideo(NONE),
- mSkipRenderingAudioUntilMediaTimeUs(-1ll),
- mSkipRenderingVideoUntilMediaTimeUs(-1ll),
- mNumFramesTotal(0ll),
- mNumFramesDropped(0ll),
+ mResumePending(false),
mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
mStarted(false) {
clearFlushComplete();
@@ -306,10 +316,6 @@ void NuPlayer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
-void NuPlayer::resume() {
- (new AMessage(kWhatResume, id()))->post();
-}
-
void NuPlayer::resetAsync() {
if (mSource != NULL) {
// During a reset, the data source might be unresponsive already, we need to
@@ -459,8 +465,10 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
size_t trackIndex;
int32_t select;
+ int64_t timeUs;
CHECK(msg->findSize("trackIndex", &trackIndex));
CHECK(msg->findInt32("select", &select));
+ CHECK(msg->findInt64("timeUs", &timeUs));
status_t err = INVALID_OPERATION;
@@ -474,7 +482,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
if (trackIndex < inbandTracks) {
- err = mSource->selectTrack(trackIndex, select);
+ err = mSource->selectTrack(trackIndex, select, timeUs);
if (!select && err == OK) {
int32_t type;
@@ -526,19 +534,24 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
{
ALOGV("kWhatSetVideoNativeWindow");
- mDeferredActions.push_back(
- new ShutdownDecoderAction(
- false /* audio */, true /* video */));
-
sp<RefBase> obj;
CHECK(msg->findObject("native-window", &obj));
+ if (mSource == NULL || mSource->getFormat(false /* audio */) == NULL) {
+ performSetSurface(static_cast<NativeWindowWrapper *>(obj.get()));
+ break;
+ }
+
+ mDeferredActions.push_back(
+ new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
+
mDeferredActions.push_back(
new SetSurfaceAction(
static_cast<NativeWindowWrapper *>(obj.get())));
if (obj != NULL) {
- if (mStarted && mSource->getFormat(false /* audio */) != NULL) {
+ if (mStarted) {
// Issue a seek to refresh the video screen only if started otherwise
// the extractor may not yet be started and will assert.
// If the video decoder is not set (perhaps audio only in this case)
@@ -556,6 +569,12 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
new SimpleAction(&NuPlayer::performScanSources));
}
+ // After a flush without shutdown, decoder is paused.
+ // Don't resume it until source seek is done, otherwise it could
+ // start pulling stale data too soon.
+ mDeferredActions.push_back(
+ new ResumeDecoderAction(false /* needNotify */));
+
processDeferredActions();
break;
}
@@ -574,69 +593,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatStart:
{
ALOGV("kWhatStart");
-
- mVideoIsAVC = false;
- mOffloadAudio = false;
- mAudioEOS = false;
- mVideoEOS = false;
- mSkipRenderingAudioUntilMediaTimeUs = -1;
- mSkipRenderingVideoUntilMediaTimeUs = -1;
- mNumFramesTotal = 0;
- mNumFramesDropped = 0;
- mStarted = true;
-
- /* instantiate decoders now for secure playback */
- if (mSourceFlags & Source::FLAG_SECURE) {
- if (mNativeWindow != NULL) {
- instantiateDecoder(false, &mVideoDecoder);
- }
-
- if (mAudioSink != NULL) {
- instantiateDecoder(true, &mAudioDecoder);
- }
- }
-
- mSource->start();
-
- uint32_t flags = 0;
-
- if (mSource->isRealTime()) {
- flags |= Renderer::FLAG_REAL_TIME;
- }
-
- sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
- audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
- if (mAudioSink != NULL) {
- streamType = mAudioSink->getAudioStreamType();
- }
-
- sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
-
- mOffloadAudio =
- canOffloadStream(audioMeta, (videoFormat != NULL),
- true /* is_streaming */, streamType);
- if (mOffloadAudio) {
- flags |= Renderer::FLAG_OFFLOAD_AUDIO;
- }
-
- sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
- ++mRendererGeneration;
- notify->setInt32("generation", mRendererGeneration);
- mRenderer = new Renderer(mAudioSink, notify, flags);
-
- mRendererLooper = new ALooper;
- mRendererLooper->setName("NuPlayerRenderer");
- mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
- mRendererLooper->registerHandler(mRenderer);
-
- sp<MetaData> meta = getFileMeta();
- int32_t rate;
- if (meta != NULL
- && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) {
- mRenderer->setVideoFrameRate(rate);
+ if (mStarted) {
+ onResume();
+ } else {
+ onStart();
}
-
- postScanSources();
break;
}
@@ -663,11 +624,21 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
instantiateDecoder(false, &mVideoDecoder);
}
- if (mAudioSink != NULL) {
- if (mOffloadAudio) {
+ // Don't try to re-open audio sink if there's an existing decoder.
+ if (mAudioSink != NULL && mAudioDecoder == NULL) {
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+ audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
+ const bool hasVideo = (videoFormat != NULL);
+ const bool canOffload = canOffloadStream(
+ audioMeta, hasVideo, true /* is_streaming */, streamType);
+ if (canOffload) {
+ if (!mOffloadAudio) {
+ mRenderer->signalEnableOffloadAudio();
+ }
// open audio sink early under offload mode.
sp<AMessage> format = mSource->getFormat(true /*audio*/);
- openAudioSink(format, true /*offloadOnly*/);
+ tryOpenAudioSinkForOffload(format, hasVideo);
}
instantiateDecoder(true, &mAudioDecoder);
}
@@ -731,16 +702,26 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
int32_t what;
CHECK(msg->findInt32("what", &what));
- if (what == Decoder::kWhatFillThisBuffer) {
- status_t err = feedDecoderInputData(
- audio, msg);
+ if (what == DecoderBase::kWhatInputDiscontinuity) {
+ int32_t formatChange;
+ CHECK(msg->findInt32("formatChange", &formatChange));
- if (err == -EWOULDBLOCK) {
- if (mSource->feedMoreTSData() == OK) {
- msg->post(10 * 1000ll);
- }
+ ALOGV("%s discontinuity: formatChange %d",
+ audio ? "audio" : "video", formatChange);
+
+ if (formatChange) {
+ mDeferredActions.push_back(
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
}
- } else if (what == Decoder::kWhatEOS) {
+
+ mDeferredActions.push_back(
+ new SimpleAction(
+ &NuPlayer::performScanSources));
+
+ processDeferredActions();
+ } else if (what == DecoderBase::kWhatEOS) {
int32_t err;
CHECK(msg->findInt32("err", &err));
@@ -753,25 +734,20 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
mRenderer->queueEOS(audio, err);
- } else if (what == Decoder::kWhatFlushCompleted) {
+ } else if (what == DecoderBase::kWhatFlushCompleted) {
ALOGV("decoder %s flush completed", audio ? "audio" : "video");
handleFlushComplete(audio, true /* isDecoder */);
finishFlushIfPossible();
- } else if (what == Decoder::kWhatOutputFormatChanged) {
+ } else if (what == DecoderBase::kWhatVideoSizeChanged) {
sp<AMessage> format;
CHECK(msg->findMessage("format", &format));
- if (audio) {
- openAudioSink(format, false /*offloadOnly*/);
- } else {
- // video
- sp<AMessage> inputFormat =
- mSource->getFormat(false /* audio */);
+ sp<AMessage> inputFormat =
+ mSource->getFormat(false /* audio */);
- updateVideoSize(inputFormat, format);
- }
- } else if (what == Decoder::kWhatShutdownCompleted) {
+ updateVideoSize(inputFormat, format);
+ } else if (what == DecoderBase::kWhatShutdownCompleted) {
ALOGV("%s shutdown completed", audio ? "audio" : "video");
if (audio) {
mAudioDecoder.clear();
@@ -788,7 +764,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
finishFlushIfPossible();
- } else if (what == Decoder::kWhatError) {
+ } else if (what == DecoderBase::kWhatResumeCompleted) {
+ finishResume();
+ } else if (what == DecoderBase::kWhatError) {
status_t err;
if (!msg->findInt32("err", &err) || err == OK) {
err = UNKNOWN_ERROR;
@@ -797,6 +775,8 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
// Decoder errors can be due to Source (e.g. from streaming),
// or from decoding corrupted bitstreams, or from other decoder
// MediaCodec operations (e.g. from an ongoing reset or seek).
+ // They may also be due to openAudioSink failure at
+ // decoder start or after a format change.
//
// We try to gracefully shut down the affected decoder if possible,
// rather than trying to force the shutdown with something
@@ -811,7 +791,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
switch (*flushing) {
case NONE:
mDeferredActions.push_back(
- new ShutdownDecoderAction(audio, !audio /* video */));
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
processDeferredActions();
break;
case FLUSHING_DECODER:
@@ -834,8 +816,6 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
break; // Finish anyways.
}
notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
- } else if (what == Decoder::kWhatDrainThisBuffer) {
- renderBuffer(audio, msg);
} else {
ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
what,
@@ -901,7 +881,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("media rendering started");
notifyListener(MEDIA_STARTED, 0, 0);
} else if (what == Renderer::kWhatAudioOffloadTearDown) {
- ALOGV("Tear down audio offload, fall back to s/w path");
+ ALOGV("Tear down audio offload, fall back to s/w path if due to error.");
int64_t positionUs;
CHECK(msg->findInt64("positionUs", &positionUs));
int32_t reason;
@@ -909,15 +889,17 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
closeAudioSink();
mAudioDecoder.clear();
++mAudioDecoderGeneration;
- mRenderer->flush(true /* audio */);
+ mRenderer->flush(
+ true /* audio */, false /* notifyComplete */);
if (mVideoDecoder != NULL) {
- mRenderer->flush(false /* audio */);
+ mRenderer->flush(
+ false /* audio */, false /* notifyComplete */);
}
- mRenderer->signalDisableOffloadAudio();
- mOffloadAudio = false;
performSeek(positionUs, false /* needNotify */);
if (reason == Renderer::kDueToError) {
+ mRenderer->signalDisableOffloadAudio();
+ mOffloadAudio = false;
instantiateDecoder(true /* audio */, &mAudioDecoder);
}
}
@@ -934,8 +916,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("kWhatReset");
mDeferredActions.push_back(
- new ShutdownDecoderAction(
- true /* audio */, true /* video */));
+ new FlushDecoderAction(
+ FLUSH_CMD_SHUTDOWN /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performReset));
@@ -955,11 +938,18 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
seekTimeUs, needNotify);
mDeferredActions.push_back(
- new SimpleAction(&NuPlayer::performDecoderFlush));
+ new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+ FLUSH_CMD_FLUSH /* video */));
mDeferredActions.push_back(
new SeekAction(seekTimeUs, needNotify));
+ // After a flush without shutdown, decoder is paused.
+ // Don't resume it until source seek is done, otherwise it could
+ // start pulling stale data too soon.
+ mDeferredActions.push_back(
+ new ResumeDecoderAction(needNotify));
+
processDeferredActions();
break;
}
@@ -979,26 +969,6 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatResume:
- {
- if (mSource != NULL) {
- mSource->resume();
- } else {
- ALOGW("resume called when source is gone or not set");
- }
- // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
- // needed.
- if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
- instantiateDecoder(true /* audio */, &mAudioDecoder);
- }
- if (mRenderer != NULL) {
- mRenderer->resume();
- } else {
- ALOGW("resume called when renderer is gone or not set");
- }
- break;
- }
-
case kWhatSourceNotify:
{
onSourceNotify(msg);
@@ -1017,6 +987,91 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
}
+void NuPlayer::onResume() {
+ if (mSource != NULL) {
+ mSource->resume();
+ } else {
+ ALOGW("resume called when source is gone or not set");
+ }
+ // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
+ // needed.
+ if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
+ instantiateDecoder(true /* audio */, &mAudioDecoder);
+ }
+ if (mRenderer != NULL) {
+ mRenderer->resume();
+ } else {
+ ALOGW("resume called when renderer is gone or not set");
+ }
+}
+
+void NuPlayer::onStart() {
+ mOffloadAudio = false;
+ mAudioEOS = false;
+ mVideoEOS = false;
+ mStarted = true;
+
+ /* instantiate decoders now for secure playback */
+ if (mSourceFlags & Source::FLAG_SECURE) {
+ if (mNativeWindow != NULL) {
+ instantiateDecoder(false, &mVideoDecoder);
+ }
+
+ if (mAudioSink != NULL) {
+ instantiateDecoder(true, &mAudioDecoder);
+ }
+ }
+
+ mSource->start();
+
+ uint32_t flags = 0;
+
+ if (mSource->isRealTime()) {
+ flags |= Renderer::FLAG_REAL_TIME;
+ }
+
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+
+ mOffloadAudio =
+ canOffloadStream(audioMeta, (videoFormat != NULL),
+ true /* is_streaming */, streamType);
+ if (mOffloadAudio) {
+ flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+ }
+
+ sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
+ ++mRendererGeneration;
+ notify->setInt32("generation", mRendererGeneration);
+ mRenderer = new Renderer(mAudioSink, notify, flags);
+
+ mRendererLooper = new ALooper;
+ mRendererLooper->setName("NuPlayerRenderer");
+ mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ mRendererLooper->registerHandler(mRenderer);
+
+ sp<MetaData> meta = getFileMeta();
+ int32_t rate;
+ if (meta != NULL
+ && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) {
+ mRenderer->setVideoFrameRate(rate);
+ }
+
+ if (mVideoDecoder != NULL) {
+ mVideoDecoder->setRenderer(mRenderer);
+ }
+ if (mAudioDecoder != NULL) {
+ mAudioDecoder->setRenderer(mRenderer);
+ }
+
+ postScanSources();
+}
+
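onStart() folds the offload decision into the renderer setup: canOffloadStream() looks at the audio format, whether video is present, and the sink's stream type. A condensed, illustrative view of just that decision (planRenderer() is not a function in this patch; canOffloadStream() is assumed to have the stagefright Utils signature used above):

    // Illustrative only; planRenderer() is not part of this patch.
    #include <media/stagefright/MetaData.h>
    #include <media/stagefright/Utils.h>
    #include <system/audio.h>

    using namespace android;

    struct RendererConfig {
        bool realTime;
        bool offloadAudio;
    };

    static RendererConfig planRenderer(
            const sp<MetaData> &audioMeta, bool hasVideo, bool sourceIsRealTime,
            audio_stream_type_t streamType) {
        RendererConfig cfg;
        cfg.realTime = sourceIsRealTime;
        // Offload is only attempted when format, video presence and stream type allow it;
        // tryOpenAudioSinkForOffload() above still falls back to PCM if the sink open fails.
        cfg.offloadAudio = canOffloadStream(
                audioMeta, hasVideo, true /* is_streaming */, streamType);
        return cfg;
    }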
bool NuPlayer::audioDecoderStillNeeded() {
// Audio decoder is no longer needed if it's in shut/shutting down status.
return ((mFlushingAudio != SHUT_DOWN) && (mFlushingAudio != SHUTTING_DOWN_DECODER));
@@ -1074,22 +1129,6 @@ void NuPlayer::finishFlushIfPossible() {
ALOGV("both audio and video are flushed now.");
- mPendingAudioAccessUnit.clear();
- mAggregateBuffer.clear();
-
- if (mTimeDiscontinuityPending) {
- mRenderer->signalTimeDiscontinuity();
- mTimeDiscontinuityPending = false;
- }
-
- if (mAudioDecoder != NULL && mFlushingAudio == FLUSHED) {
- mAudioDecoder->signalResume();
- }
-
- if (mVideoDecoder != NULL && mFlushingVideo == FLUSHED) {
- mVideoDecoder->signalResume();
- }
-
mFlushingAudio = NONE;
mFlushingVideo = NONE;
@@ -1110,28 +1149,16 @@ void NuPlayer::postScanSources() {
mScanSourcesPending = true;
}
-void NuPlayer::openAudioSink(const sp<AMessage> &format, bool offloadOnly) {
- uint32_t flags;
- int64_t durationUs;
- bool hasVideo = (mVideoDecoder != NULL);
- // FIXME: we should handle the case where the video decoder
- // is created after we receive the format change indication.
- // Current code will just make that we select deep buffer
- // with video which should not be a problem as it should
- // not prevent from keeping A/V sync.
- if (hasVideo &&
- mSource->getDuration(&durationUs) == OK &&
- durationUs
- > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
- flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
- } else {
- flags = AUDIO_OUTPUT_FLAG_NONE;
- }
+void NuPlayer::tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo) {
+ // Note: This is called early in NuPlayer to determine whether offloading
+ // is possible; otherwise the decoders call the renderer openAudioSink directly.
- mOffloadAudio = mRenderer->openAudioSink(
- format, offloadOnly, hasVideo, flags);
-
- if (mOffloadAudio) {
+ status_t err = mRenderer->openAudioSink(
+ format, true /* offloadOnly */, hasVideo, AUDIO_OUTPUT_FLAG_NONE, &mOffloadAudio);
+ if (err != OK) {
+ // On any failure, turn off mOffloadAudio.
+ mOffloadAudio = false;
+ } else if (mOffloadAudio) {
sp<MetaData> audioMeta =
mSource->getFormatMeta(true /* audio */);
sendMetaDataToHal(mAudioSink, audioMeta);
@@ -1142,7 +1169,7 @@ void NuPlayer::closeAudioSink() {
mRenderer->closeAudioSink();
}
-status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
+status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {
if (*decoder != NULL) {
return OK;
}
@@ -1156,7 +1183,6 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
if (!audio) {
AString mime;
CHECK(format->findString("mime", &mime));
- mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id());
mCCDecoder = new CCDecoder(ccNotify);
@@ -1172,16 +1198,28 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
notify->setInt32("generation", mAudioDecoderGeneration);
if (mOffloadAudio) {
- *decoder = new DecoderPassThrough(notify);
+ *decoder = new DecoderPassThrough(notify, mSource, mRenderer);
} else {
- *decoder = new Decoder(notify);
+ *decoder = new Decoder(notify, mSource, mRenderer);
}
} else {
sp<AMessage> notify = new AMessage(kWhatVideoNotify, id());
++mVideoDecoderGeneration;
notify->setInt32("generation", mVideoDecoderGeneration);
- *decoder = new Decoder(notify, mNativeWindow);
+ *decoder = new Decoder(
+ notify, mSource, mRenderer, mNativeWindow, mCCDecoder);
+
+ // enable FRC if high-quality AV sync is requested, even if not
+ // queuing to native window, as this will even improve textureview
+ // playback.
+ {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.media.avsync", value, NULL) &&
+ (!strcmp("1", value) || !strcasecmp("true", value))) {
+ format->setInt32("auto-frc", 1);
+ }
+ }
}
(*decoder)->init();
(*decoder)->configure(format);
@@ -1211,281 +1249,6 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
return OK;
}
-status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
-
- if ((audio && mFlushingAudio != NONE)
- || (!audio && mFlushingVideo != NONE)
- || mSource == NULL) {
- reply->setInt32("err", INFO_DISCONTINUITY);
- reply->post();
- return OK;
- }
-
- sp<ABuffer> accessUnit;
-
- // Aggregate smaller buffers into a larger buffer.
- // The goal is to reduce power consumption.
- // Note this will not work if the decoder requires one frame per buffer.
- bool doBufferAggregation = (audio && mOffloadAudio);
- bool needMoreData = false;
-
- bool dropAccessUnit;
- do {
- status_t err;
- // Did we save an accessUnit earlier because of a discontinuity?
- if (audio && (mPendingAudioAccessUnit != NULL)) {
- accessUnit = mPendingAudioAccessUnit;
- mPendingAudioAccessUnit.clear();
- err = mPendingAudioErr;
- ALOGV("feedDecoderInputData() use mPendingAudioAccessUnit");
- } else {
- err = mSource->dequeueAccessUnit(audio, &accessUnit);
- }
-
- if (err == -EWOULDBLOCK) {
- return err;
- } else if (err != OK) {
- if (err == INFO_DISCONTINUITY) {
- if (doBufferAggregation && (mAggregateBuffer != NULL)) {
- // We already have some data so save this for later.
- mPendingAudioErr = err;
- mPendingAudioAccessUnit = accessUnit;
- accessUnit.clear();
- ALOGD("feedDecoderInputData() save discontinuity for later");
- break;
- }
- int32_t type;
- CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
-
- bool formatChange =
- (audio &&
- (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
- || (!audio &&
- (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));
-
- bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;
-
- ALOGI("%s discontinuity (formatChange=%d, time=%d)",
- audio ? "audio" : "video", formatChange, timeChange);
-
- if (audio) {
- mSkipRenderingAudioUntilMediaTimeUs = -1;
- } else {
- mSkipRenderingVideoUntilMediaTimeUs = -1;
- }
-
- if (timeChange) {
- sp<AMessage> extra;
- if (accessUnit->meta()->findMessage("extra", &extra)
- && extra != NULL) {
- int64_t resumeAtMediaTimeUs;
- if (extra->findInt64(
- "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
- ALOGI("suppressing rendering of %s until %lld us",
- audio ? "audio" : "video", resumeAtMediaTimeUs);
-
- if (audio) {
- mSkipRenderingAudioUntilMediaTimeUs =
- resumeAtMediaTimeUs;
- } else {
- mSkipRenderingVideoUntilMediaTimeUs =
- resumeAtMediaTimeUs;
- }
- }
- }
- }
-
- mTimeDiscontinuityPending =
- mTimeDiscontinuityPending || timeChange;
-
- bool seamlessFormatChange = false;
- sp<AMessage> newFormat = mSource->getFormat(audio);
- if (formatChange) {
- seamlessFormatChange =
- getDecoder(audio)->supportsSeamlessFormatChange(newFormat);
- // treat seamless format change separately
- formatChange = !seamlessFormatChange;
- }
- bool shutdownOrFlush = formatChange || timeChange;
-
- // We want to queue up scan-sources only once per discontinuity.
- // We control this by doing it only if neither audio nor video are
- // flushing or shutting down. (After handling 1st discontinuity, one
- // of the flushing states will not be NONE.)
- // No need to scan sources if this discontinuity does not result
- // in a flush or shutdown, as the flushing state will stay NONE.
- if (mFlushingAudio == NONE && mFlushingVideo == NONE &&
- shutdownOrFlush) {
- // And we'll resume scanning sources once we're done
- // flushing.
- mDeferredActions.push_front(
- new SimpleAction(
- &NuPlayer::performScanSources));
- }
-
- if (formatChange /* not seamless */) {
- // must change decoder
- flushDecoder(audio, /* needShutdown = */ true);
- } else if (timeChange) {
- // need to flush
- flushDecoder(audio, /* needShutdown = */ false, newFormat);
- err = OK;
- } else if (seamlessFormatChange) {
- // reuse existing decoder and don't flush
- updateDecoderFormatWithoutFlush(audio, newFormat);
- err = OK;
- } else {
- // This stream is unaffected by the discontinuity
- return -EWOULDBLOCK;
- }
- }
-
- reply->setInt32("err", err);
- reply->post();
- return OK;
- }
-
- if (!audio) {
- ++mNumFramesTotal;
- }
-
- dropAccessUnit = false;
- if (!audio
- && !(mSourceFlags & Source::FLAG_SECURE)
- && mRenderer->getVideoLateByUs() > 100000ll
- && mVideoIsAVC
- && !IsAVCReferenceFrame(accessUnit)) {
- dropAccessUnit = true;
- ++mNumFramesDropped;
- }
-
- size_t smallSize = accessUnit->size();
- needMoreData = false;
- if (doBufferAggregation && (mAggregateBuffer == NULL)
- // Don't bother if only room for a few small buffers.
- && (smallSize < (kAggregateBufferSizeBytes / 3))) {
- // Create a larger buffer for combining smaller buffers from the extractor.
- mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
- mAggregateBuffer->setRange(0, 0); // start empty
- }
-
- if (doBufferAggregation && (mAggregateBuffer != NULL)) {
- int64_t timeUs;
- int64_t dummy;
- bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
- bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
- // Will the smaller buffer fit?
- size_t bigSize = mAggregateBuffer->size();
- size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
- // Should we save this small buffer for the next big buffer?
- // If the first small buffer did not have a timestamp then save
- // any buffer that does have a timestamp until the next big buffer.
- if ((smallSize > roomLeft)
- || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
- mPendingAudioErr = err;
- mPendingAudioAccessUnit = accessUnit;
- accessUnit.clear();
- } else {
- // Grab time from first small buffer if available.
- if ((bigSize == 0) && smallTimestampValid) {
- mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
- }
- // Append small buffer to the bigger buffer.
- memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
- bigSize += smallSize;
- mAggregateBuffer->setRange(0, bigSize);
-
- // Keep looping until we run out of room in the mAggregateBuffer.
- needMoreData = true;
-
- ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
- smallSize, bigSize, mAggregateBuffer->capacity());
- }
- }
- } while (dropAccessUnit || needMoreData);
-
- // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");
-
-#if 0
- int64_t mediaTimeUs;
- CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
- ALOGV("feeding %s input buffer at media time %.2f secs",
- audio ? "audio" : "video",
- mediaTimeUs / 1E6);
-#endif
-
- if (!audio) {
- mCCDecoder->decode(accessUnit);
- }
-
- if (doBufferAggregation && (mAggregateBuffer != NULL)) {
- ALOGV("feedDecoderInputData() reply with aggregated buffer, %zu",
- mAggregateBuffer->size());
- reply->setBuffer("buffer", mAggregateBuffer);
- mAggregateBuffer.clear();
- } else {
- reply->setBuffer("buffer", accessUnit);
- }
-
- reply->post();
-
- return OK;
-}
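For context on the block removed above: with offloaded audio, feedDecoderInputData() copied several small access units into one large ABuffer, carried the timestamp of the first unit, and parked any unit that did not fit in mPendingAudioAccessUnit (that responsibility moves into the pass-through decoder in this change). A minimal standalone sketch of the copy-and-carry-over idea only, assuming made-up Chunk/aggregate() names and an illustrative capacity rather than the real kAggregateBufferSizeBytes:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for an access unit: raw bytes plus an optional timestamp.
    struct Chunk {
        std::vector<uint8_t> data;
        int64_t timeUs = -1;   // -1 means "no timestamp"
    };

    // Illustrative capacity; the real kAggregateBufferSizeBytes lives elsewhere.
    static const size_t kAggregateCapacity = 32 * 1024;

    // Fold small chunks into 'big' until one no longer fits; the chunk that did
    // not fit is handed back through 'pending' so the caller can retry it with
    // the next aggregate buffer, mirroring mPendingAudioAccessUnit above.
    size_t aggregate(const std::vector<Chunk> &small, Chunk *big, Chunk *pending) {
        big->data.clear();
        big->timeUs = -1;
        size_t consumed = 0;
        for (const Chunk &c : small) {
            if (big->data.size() + c.data.size() > kAggregateCapacity) {
                *pending = c;           // save for the next aggregate buffer
                break;
            }
            if (big->data.empty() && c.timeUs >= 0) {
                big->timeUs = c.timeUs; // take the timestamp of the first chunk
            }
            big->data.insert(big->data.end(), c.data.begin(), c.data.end());
            ++consumed;
        }
        return consumed;                // number of chunks folded into 'big'
    }

    int main() {
        Chunk unit;
        unit.data.assign(10 * 1024, 0);
        unit.timeUs = 0;
        std::vector<Chunk> units(4, unit);
        Chunk big, pending;
        size_t n = aggregate(units, &big, &pending);  // folds 3, leaves 1 pending
        return n == 3 ? 0 : 1;
    }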
-
-void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
- // ALOGV("renderBuffer %s", audio ? "audio" : "video");
-
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
-
- if ((audio && mFlushingAudio != NONE)
- || (!audio && mFlushingVideo != NONE)) {
- // We're currently attempting to flush the decoder, in order
- // to complete this, the decoder wants all its buffers back,
- // so we don't want any output buffers it sent us (from before
- // we initiated the flush) to be stuck in the renderer's queue.
-
- ALOGV("we're still flushing the %s decoder, sending its output buffer"
- " right back.", audio ? "audio" : "video");
-
- reply->post();
- return;
- }
-
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
-
- int64_t mediaTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
- int64_t &skipUntilMediaTimeUs =
- audio
- ? mSkipRenderingAudioUntilMediaTimeUs
- : mSkipRenderingVideoUntilMediaTimeUs;
-
- if (skipUntilMediaTimeUs >= 0) {
-
- if (mediaTimeUs < skipUntilMediaTimeUs) {
- ALOGV("dropping %s buffer at time %lld as requested.",
- audio ? "audio" : "video",
- mediaTimeUs);
-
- reply->post();
- return;
- }
-
- skipUntilMediaTimeUs = -1;
- }
-
- if (!audio && mCCDecoder->isSelected()) {
- mCCDecoder->display(mediaTimeUs);
- }
-
- mRenderer->queueBuffer(audio, buffer, reply);
-}
-
void NuPlayer::updateVideoSize(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat) {
@@ -1566,12 +1329,11 @@ void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
driver->notifyListener(msg, ext1, ext2, in);
}
-void NuPlayer::flushDecoder(
- bool audio, bool needShutdown, const sp<AMessage> &newFormat) {
+void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
ALOGV("[%s] flushDecoder needShutdown=%d",
audio ? "audio" : "video", needShutdown);
- const sp<Decoder> &decoder = getDecoder(audio);
+ const sp<DecoderBase> &decoder = getDecoder(audio);
if (decoder == NULL) {
ALOGI("flushDecoder %s without decoder present",
audio ? "audio" : "video");
@@ -1582,8 +1344,7 @@ void NuPlayer::flushDecoder(
++mScanSourcesGeneration;
mScanSourcesPending = false;
- decoder->signalFlush(newFormat);
- mRenderer->flush(audio);
+ decoder->signalFlush();
FlushStatus newStatus =
needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
@@ -1598,33 +1359,17 @@ void NuPlayer::flushDecoder(
ALOGE_IF(mFlushingVideo != NONE,
"video flushDecoder() is called in state %d", mFlushingVideo);
mFlushingVideo = newStatus;
-
- if (mCCDecoder != NULL) {
- mCCDecoder->flush();
- }
}
}
-void NuPlayer::updateDecoderFormatWithoutFlush(
- bool audio, const sp<AMessage> &format) {
- ALOGV("[%s] updateDecoderFormatWithoutFlush", audio ? "audio" : "video");
-
- const sp<Decoder> &decoder = getDecoder(audio);
- if (decoder == NULL) {
- ALOGI("updateDecoderFormatWithoutFlush %s without decoder present",
- audio ? "audio" : "video");
- return;
- }
-
- decoder->signalUpdateFormat(format);
-}
-
void NuPlayer::queueDecoderShutdown(
bool audio, bool video, const sp<AMessage> &reply) {
ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
mDeferredActions.push_back(
- new ShutdownDecoderAction(audio, video));
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ video ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE));
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performScanSources));
@@ -1670,10 +1415,11 @@ status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const {
return err;
}
-status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
+status_t NuPlayer::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
msg->setSize("trackIndex", trackIndex);
msg->setInt32("select", select);
+ msg->setInt64("timeUs", timeUs);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
@@ -1699,8 +1445,13 @@ status_t NuPlayer::getCurrentPosition(int64_t *mediaUs) {
}
void NuPlayer::getStats(int64_t *numFramesTotal, int64_t *numFramesDropped) {
- *numFramesTotal = mNumFramesTotal;
- *numFramesDropped = mNumFramesDropped;
+ sp<DecoderBase> decoder = getDecoder(false /* audio */);
+ if (decoder != NULL) {
+ decoder->getStats(numFramesTotal, numFramesDropped);
+ } else {
+ *numFramesTotal = 0;
+ *numFramesDropped = 0;
+ }
}
sp<MetaData> NuPlayer::getFileMeta() {
@@ -1757,52 +1508,23 @@ void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) {
mSource->seekTo(seekTimeUs);
++mTimedTextGeneration;
- if (mDriver != NULL) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
- if (needNotify) {
- driver->notifySeekComplete();
- }
- }
- }
-
// everything's flushed, continue playback.
}
-void NuPlayer::performDecoderFlush() {
- ALOGV("performDecoderFlush");
+void NuPlayer::performDecoderFlush(FlushCommand audio, FlushCommand video) {
+ ALOGV("performDecoderFlush audio=%d, video=%d", audio, video);
- if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+ if ((audio == FLUSH_CMD_NONE || mAudioDecoder == NULL)
+ && (video == FLUSH_CMD_NONE || mVideoDecoder == NULL)) {
return;
}
- mTimeDiscontinuityPending = true;
-
- if (mAudioDecoder != NULL) {
- flushDecoder(true /* audio */, false /* needShutdown */);
- }
-
- if (mVideoDecoder != NULL) {
- flushDecoder(false /* audio */, false /* needShutdown */);
- }
-}
-
-void NuPlayer::performDecoderShutdown(bool audio, bool video) {
- ALOGV("performDecoderShutdown audio=%d, video=%d", audio, video);
-
- if ((!audio || mAudioDecoder == NULL)
- && (!video || mVideoDecoder == NULL)) {
- return;
- }
-
- mTimeDiscontinuityPending = true;
-
- if (audio && mAudioDecoder != NULL) {
- flushDecoder(true /* audio */, true /* needShutdown */);
+ if (audio != FLUSH_CMD_NONE && mAudioDecoder != NULL) {
+ flushDecoder(true /* audio */, (audio == FLUSH_CMD_SHUTDOWN));
}
- if (video && mVideoDecoder != NULL) {
- flushDecoder(false /* audio */, true /* needShutdown */);
+ if (video != FLUSH_CMD_NONE && mVideoDecoder != NULL) {
+ flushDecoder(false /* audio */, (video == FLUSH_CMD_SHUTDOWN));
}
}
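The rewritten performDecoderFlush() above folds the old flush and shutdown entry points into one FlushCommand per stream. A small sketch of that mapping, assuming flushing reduces to a per-stream call with a needShutdown flag (flushStream() and performFlush() are illustrative names, not the NuPlayer API):

    #include <cstdio>

    enum FlushCommand { FLUSH_CMD_NONE, FLUSH_CMD_FLUSH, FLUSH_CMD_SHUTDOWN };

    // Illustrative per-stream flush; stands in for NuPlayer::flushDecoder().
    static void flushStream(bool audio, bool needShutdown) {
        std::printf("flush %s decoder, shutdown=%d\n",
                    audio ? "audio" : "video", needShutdown);
    }

    // Apply one command per stream, skipping streams set to FLUSH_CMD_NONE or
    // streams that have no decoder instantiated.
    void performFlush(FlushCommand audio, FlushCommand video,
                      bool haveAudioDecoder, bool haveVideoDecoder) {
        if (audio != FLUSH_CMD_NONE && haveAudioDecoder) {
            flushStream(true /* audio */, audio == FLUSH_CMD_SHUTDOWN);
        }
        if (video != FLUSH_CMD_NONE && haveVideoDecoder) {
            flushStream(false /* audio */, video == FLUSH_CMD_SHUTDOWN);
        }
    }

    int main() {
        // e.g. a seek typically flushes both streams without shutting them down.
        performFlush(FLUSH_CMD_FLUSH, FLUSH_CMD_FLUSH, true, true);
        return 0;
    }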
@@ -1871,6 +1593,42 @@ void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) {
}
}
+void NuPlayer::performResumeDecoders(bool needNotify) {
+ if (needNotify) {
+ mResumePending = true;
+ if (mVideoDecoder == NULL) {
+ // if audio-only, we can notify seek complete now,
+ // as the resume operation will be relatively fast.
+ finishResume();
+ }
+ }
+
+ if (mVideoDecoder != NULL) {
+ // When seeks arrive back to back, MediaPlayer caches the latest seek
+ // position and sends down a new seek request once the previous one
+ // completes. Wait for at least one video output frame before notifying
+ // seek complete, so the video thumbnail is updated while the seek bar
+ // is being dragged.
+ mVideoDecoder->signalResume(needNotify);
+ }
+
+ if (mAudioDecoder != NULL) {
+ mAudioDecoder->signalResume(false /* needNotify */);
+ }
+}
+
+void NuPlayer::finishResume() {
+ if (mResumePending) {
+ mResumePending = false;
+ if (mDriver != NULL) {
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver != NULL) {
+ driver->notifySeekComplete();
+ }
+ }
+ }
+}
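performResumeDecoders() and finishResume() above defer the seek-complete notification until the video decoder delivers its first frame after a resume, while audio-only content notifies immediately. A rough model of that hand-shake, with a plain callback standing in for the driver's notifySeekComplete() (ResumeTracker is an assumed name):

    #include <functional>
    #include <utility>

    // Tiny model of the resume hand-shake: arm the pending flag, notify at once
    // for audio-only content, otherwise wait for the first decoded video frame.
    class ResumeTracker {
    public:
        explicit ResumeTracker(std::function<void()> notifySeekComplete)
            : mNotify(std::move(notifySeekComplete)), mResumePending(false) {}

        void resume(bool hasVideo, bool needNotify) {
            if (needNotify) {
                mResumePending = true;
                if (!hasVideo) {
                    finishResume();   // audio-only: resume is fast, notify now
                }
            }
            // a real player would also signal its decoders to resume here
        }

        // Called when the video decoder emits its first output buffer after resume.
        void onFirstVideoFrame() { finishResume(); }

    private:
        void finishResume() {
            if (mResumePending) {
                mResumePending = false;
                mNotify();            // e.g. driver->notifySeekComplete()
            }
        }

        std::function<void()> mNotify;
        bool mResumePending;
    };

    int main() {
        ResumeTracker tracker([] { /* notify the listener */ });
        tracker.resume(true /* hasVideo */, true /* needNotify */);
        tracker.onFirstVideoFrame();  // seek-complete is delivered here
        return 0;
    }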
+
void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
int32_t what;
CHECK(msg->findInt32("what", &what));
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 24c06c9..bb32eac 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -54,7 +54,6 @@ struct NuPlayer : public AHandler {
void start();
void pause();
- void resume();
// Will notify the driver through "notifyResetComplete" once finished.
void resetAsync();
@@ -66,14 +65,12 @@ struct NuPlayer : public AHandler {
status_t setVideoScalingMode(int32_t mode);
status_t getTrackInfo(Parcel* reply) const;
status_t getSelectedTrack(int32_t type, Parcel* reply) const;
- status_t selectTrack(size_t trackIndex, bool select);
+ status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
status_t getCurrentPosition(int64_t *mediaUs);
void getStats(int64_t *mNumFramesTotal, int64_t *mNumFramesDropped);
sp<MetaData> getFileMeta();
- static const size_t kAggregateBufferSizeBytes;
-
protected:
virtual ~NuPlayer();
@@ -85,6 +82,7 @@ public:
private:
struct Decoder;
+ struct DecoderBase;
struct DecoderPassThrough;
struct CCDecoder;
struct GenericSource;
@@ -95,7 +93,8 @@ private:
struct Action;
struct SeekAction;
struct SetSurfaceAction;
- struct ShutdownDecoderAction;
+ struct ResumeDecoderAction;
+ struct FlushDecoderAction;
struct PostMessageAction;
struct SimpleAction;
@@ -129,10 +128,9 @@ private:
uint32_t mSourceFlags;
sp<NativeWindowWrapper> mNativeWindow;
sp<MediaPlayerBase::AudioSink> mAudioSink;
- sp<Decoder> mVideoDecoder;
- bool mVideoIsAVC;
+ sp<DecoderBase> mVideoDecoder;
bool mOffloadAudio;
- sp<Decoder> mAudioDecoder;
+ sp<DecoderBase> mAudioDecoder;
sp<CCDecoder> mCCDecoder;
sp<Renderer> mRenderer;
sp<ALooper> mRendererLooper;
@@ -160,32 +158,26 @@ private:
SHUT_DOWN,
};
- // Once the current flush is complete this indicates whether the
- // notion of time has changed.
- bool mTimeDiscontinuityPending;
+ enum FlushCommand {
+ FLUSH_CMD_NONE,
+ FLUSH_CMD_FLUSH,
+ FLUSH_CMD_SHUTDOWN,
+ };
// Status of flush responses from the decoder and renderer.
bool mFlushComplete[2][2];
- // Used by feedDecoderInputData to aggregate small buffers into
- // one large buffer.
- sp<ABuffer> mPendingAudioAccessUnit;
- status_t mPendingAudioErr;
- sp<ABuffer> mAggregateBuffer;
-
FlushStatus mFlushingAudio;
FlushStatus mFlushingVideo;
- int64_t mSkipRenderingAudioUntilMediaTimeUs;
- int64_t mSkipRenderingVideoUntilMediaTimeUs;
-
- int64_t mNumFramesTotal, mNumFramesDropped;
+ // True while a resumed seek is waiting for its first decoded frame
+ // before notifying seek complete.
+ bool mResumePending;
int32_t mVideoScalingMode;
bool mStarted;
- inline const sp<Decoder> &getDecoder(bool audio) {
+ inline const sp<DecoderBase> &getDecoder(bool audio) {
return audio ? mAudioDecoder : mVideoDecoder;
}
@@ -196,28 +188,28 @@ private:
mFlushComplete[1][1] = false;
}
- void openAudioSink(const sp<AMessage> &format, bool offloadOnly);
+ void tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo);
void closeAudioSink();
- status_t instantiateDecoder(bool audio, sp<Decoder> *decoder);
+ status_t instantiateDecoder(bool audio, sp<DecoderBase> *decoder);
void updateVideoSize(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat = NULL);
- status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
- void renderBuffer(bool audio, const sp<AMessage> &msg);
-
void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL);
void handleFlushComplete(bool audio, bool isDecoder);
void finishFlushIfPossible();
+ void onStart();
+ void onResume();
+
bool audioDecoderStillNeeded();
- void flushDecoder(
- bool audio, bool needShutdown, const sp<AMessage> &newFormat = NULL);
- void updateDecoderFormatWithoutFlush(bool audio, const sp<AMessage> &format);
+ void flushDecoder(bool audio, bool needShutdown);
+
+ void finishResume();
void postScanSources();
@@ -227,11 +219,11 @@ private:
void processDeferredActions();
void performSeek(int64_t seekTimeUs, bool needNotify);
- void performDecoderFlush();
- void performDecoderShutdown(bool audio, bool video);
+ void performDecoderFlush(FlushCommand audio, FlushCommand video);
void performReset();
void performScanSources();
void performSetSurface(const sp<NativeWindowWrapper> &wrapper);
+ void performResumeDecoders(bool needNotify);
void onSourceNotify(const sp<AMessage> &msg);
void onClosedCaptionNotify(const sp<AMessage> &msg);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
new file mode 100644
index 0000000..9229704
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerCCDecoder"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerCCDecoder.h"
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+struct CCData {
+ CCData(uint8_t type, uint8_t data1, uint8_t data2)
+ : mType(type), mData1(data1), mData2(data2) {
+ }
+ bool getChannel(size_t *channel) const {
+ if (mData1 >= 0x10 && mData1 <= 0x1f) {
+ *channel = (mData1 >= 0x18 ? 1 : 0) + (mType ? 2 : 0);
+ return true;
+ }
+ return false;
+ }
+
+ uint8_t mType;
+ uint8_t mData1;
+ uint8_t mData2;
+};
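CCData::getChannel() only reports a channel for control-code pairs (mData1 in 0x10..0x1f): bit 3 of the first data byte selects the second channel within a field, and the field (mType) adds two more, yielding CC1 through CC4 as indices 0 to 3. A standalone sketch of that mapping, assuming the same byte layout (channelForPair() is an illustrative name, not the NuPlayer class):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Derive a CEA-608 channel index (0..3 ~ CC1..CC4) from a control-code pair.
    // 'field' corresponds to CCData::mType (0 = field 1, 1 = field 2) and
    // 'data1' is the first data byte with its parity bit already stripped.
    static bool channelForPair(uint8_t field, uint8_t data1, int *channel) {
        if (data1 < 0x10 || data1 > 0x1f) {
            return false;                  // not a control code, keep last channel
        }
        *channel = (data1 >= 0x18 ? 1 : 0) + (field ? 2 : 0);
        return true;
    }

    int main() {
        int ch = -1;
        assert(channelForPair(0, 0x14, &ch) && ch == 0);  // field 1, CC1
        assert(channelForPair(0, 0x1c, &ch) && ch == 1);  // field 1, CC2
        assert(channelForPair(1, 0x14, &ch) && ch == 2);  // field 2, CC3
        assert(channelForPair(1, 0x1c, &ch) && ch == 3);  // field 2, CC4
        std::printf("channel mapping checks passed\n");
        return 0;
    }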
+
+static bool isNullPad(CCData *cc) {
+ return cc->mData1 < 0x10 && cc->mData2 < 0x10;
+}
+
+static void dumpBytePair(const sp<ABuffer> &ccBuf) {
+ size_t offset = 0;
+ AString out;
+
+ while (offset < ccBuf->size()) {
+ char tmp[128];
+
+ CCData *cc = (CCData *) (ccBuf->data() + offset);
+
+ if (isNullPad(cc)) {
+ // 1 null pad or XDS metadata, ignore
+ offset += sizeof(CCData);
+ continue;
+ }
+
+ if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
+ // 2 basic chars
+ sprintf(tmp, "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
+ // 1 special char
+ sprintf(tmp, "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Spanish/French char
+ sprintf(tmp, "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Portuguese/German/Danish char
+ sprintf(tmp, "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
+ // Mid-Row Codes (Table 69)
+ sprintf(tmp, "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
+ ||
+ ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
+ && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
+ // Misc Control Codes (Table 70)
+ sprintf(tmp, "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 & 0x70) == 0x10
+ && (cc->mData2 & 0x40) == 0x40
+ && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
+ // Preamble Address Codes (Table 71)
+ sprintf(tmp, "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else {
+ sprintf(tmp, "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ }
+
+ if (out.size() > 0) {
+ out.append(", ");
+ }
+
+ out.append(tmp);
+
+ offset += sizeof(CCData);
+ }
+
+ ALOGI("%s", out.c_str());
+}
+
+NuPlayer::CCDecoder::CCDecoder(const sp<AMessage> &notify)
+ : mNotify(notify),
+ mCurrentChannel(0),
+ mSelectedTrack(-1) {
+ for (size_t i = 0; i < sizeof(mTrackIndices)/sizeof(mTrackIndices[0]); ++i) {
+ mTrackIndices[i] = -1;
+ }
+}
+
+size_t NuPlayer::CCDecoder::getTrackCount() const {
+ return mFoundChannels.size();
+}
+
+sp<AMessage> NuPlayer::CCDecoder::getTrackInfo(size_t index) const {
+ if (!isTrackValid(index)) {
+ return NULL;
+ }
+
+ sp<AMessage> format = new AMessage();
+
+ format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
+ format->setString("language", "und");
+ format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
+ // CC1 (field 0, channel 0) is treated as the default/auto track
+ bool isDefaultAuto = (mFoundChannels[index] == 0);
+ format->setInt32("auto", isDefaultAuto);
+ format->setInt32("default", isDefaultAuto);
+ format->setInt32("forced", 0);
+
+ return format;
+}
+
+status_t NuPlayer::CCDecoder::selectTrack(size_t index, bool select) {
+ if (!isTrackValid(index)) {
+ return BAD_VALUE;
+ }
+
+ if (select) {
+ if (mSelectedTrack == (ssize_t)index) {
+ ALOGE("track %zu already selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("selected track %zu", index);
+ mSelectedTrack = index;
+ } else {
+ if (mSelectedTrack != (ssize_t)index) {
+ ALOGE("track %zu is not selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("unselected track %zu", index);
+ mSelectedTrack = -1;
+ }
+
+ return OK;
+}
+
+bool NuPlayer::CCDecoder::isSelected() const {
+ return mSelectedTrack >= 0 && mSelectedTrack < (int32_t) getTrackCount();
+}
+
+bool NuPlayer::CCDecoder::isTrackValid(size_t index) const {
+ return index < getTrackCount();
+}
+
+int32_t NuPlayer::CCDecoder::getTrackIndex(size_t channel) const {
+ if (channel < sizeof(mTrackIndices)/sizeof(mTrackIndices[0])) {
+ return mTrackIndices[channel];
+ }
+ return -1;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ sp<ABuffer> sei;
+ if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
+ return false;
+ }
+
+ bool trackAdded = false;
+
+ NALBitReader br(sei->data() + 1, sei->size() - 1);
+ // sei_message()
+ while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()
+ uint32_t payload_type = 0;
+ size_t payload_size = 0;
+ uint8_t last_byte;
+
+ do {
+ last_byte = br.getBits(8);
+ payload_type += last_byte;
+ } while (last_byte == 0xFF);
+
+ do {
+ last_byte = br.getBits(8);
+ payload_size += last_byte;
+ } while (last_byte == 0xFF);
+
+ // sei_payload()
+ if (payload_type == 4) {
+ // user_data_registered_itu_t_t35()
+
+ // ATSC A/72: 6.4.2
+ uint8_t itu_t_t35_country_code = br.getBits(8);
+ uint16_t itu_t_t35_provider_code = br.getBits(16);
+ uint32_t user_identifier = br.getBits(32);
+ uint8_t user_data_type_code = br.getBits(8);
+
+ payload_size -= 1 + 2 + 4 + 1;
+
+ if (itu_t_t35_country_code == 0xB5
+ && itu_t_t35_provider_code == 0x0031
+ && user_identifier == 'GA94'
+ && user_data_type_code == 0x3) {
+ // MPEG_cc_data()
+ // ATSC A/53 Part 4: 6.2.3.1
+ br.skipBits(1); //process_em_data_flag
+ bool process_cc_data_flag = br.getBits(1);
+ br.skipBits(1); //additional_data_flag
+ size_t cc_count = br.getBits(5);
+ br.skipBits(8); // em_data;
+ payload_size -= 2;
+
+ if (process_cc_data_flag) {
+ AString out;
+
+ sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData));
+ ccBuf->setRange(0, 0);
+
+ for (size_t i = 0; i < cc_count; i++) {
+ uint8_t marker = br.getBits(5);
+ CHECK_EQ(marker, 0x1f);
+
+ bool cc_valid = br.getBits(1);
+ uint8_t cc_type = br.getBits(2);
+ // remove odd parity bit
+ uint8_t cc_data_1 = br.getBits(8) & 0x7f;
+ uint8_t cc_data_2 = br.getBits(8) & 0x7f;
+
+ if (cc_valid
+ && (cc_type == 0 || cc_type == 1)) {
+ CCData cc(cc_type, cc_data_1, cc_data_2);
+ if (!isNullPad(&cc)) {
+ size_t channel;
+ if (cc.getChannel(&channel) && getTrackIndex(channel) < 0) {
+ mTrackIndices[channel] = mFoundChannels.size();
+ mFoundChannels.push_back(channel);
+ trackAdded = true;
+ }
+ memcpy(ccBuf->data() + ccBuf->size(),
+ (void *)&cc, sizeof(cc));
+ ccBuf->setRange(0, ccBuf->size() + sizeof(CCData));
+ }
+ }
+ }
+ payload_size -= cc_count * 3;
+
+ mCCMap.add(timeUs, ccBuf);
+ break;
+ }
+ } else {
+ ALOGV("Malformed SEI payload type 4");
+ }
+ } else {
+ ALOGV("Unsupported SEI payload type %d", payload_type);
+ }
+
+ // skipping remaining bits of this payload
+ br.skipBits(payload_size * 8);
+ }
+
+ return trackAdded;
+}
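extractFromSEI() above walks sei_message() headers in which payload_type and payload_size are accumulated byte by byte, with each 0xFF byte meaning "add 255 and keep reading" (H.264 Annex D). A minimal standalone version of just that header read, over a raw byte array instead of a NALBitReader (readFFExtended() is an assumed helper name):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Read one ff-extended value starting at data[*pos]: every 0xFF byte adds 255
    // and the first non-0xFF byte terminates the value (H.264 Annex D syntax).
    static uint32_t readFFExtended(const uint8_t *data, size_t size, size_t *pos) {
        uint32_t value = 0;
        uint8_t lastByte;
        do {
            if (*pos >= size) return value;  // truncated payload, bail out
            lastByte = data[(*pos)++];
            value += lastByte;
        } while (lastByte == 0xFF);
        return value;
    }

    int main() {
        // payload_type = 4 (user data registered), payload_size = 255 + 255 + 16 = 526
        const uint8_t sei[] = { 0x04, 0xFF, 0xFF, 0x10 };
        size_t pos = 0;
        uint32_t payloadType = readFFExtended(sei, sizeof(sei), &pos);
        uint32_t payloadSize = readFFExtended(sei, sizeof(sei), &pos);
        std::printf("payload_type=%u payload_size=%u\n", payloadType, payloadSize);
        return 0;
    }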
+
+sp<ABuffer> NuPlayer::CCDecoder::filterCCBuf(
+ const sp<ABuffer> &ccBuf, size_t index) {
+ sp<ABuffer> filteredCCBuf = new ABuffer(ccBuf->size());
+ filteredCCBuf->setRange(0, 0);
+
+ size_t cc_count = ccBuf->size() / sizeof(CCData);
+ const CCData* cc_data = (const CCData*)ccBuf->data();
+ for (size_t i = 0; i < cc_count; ++i) {
+ size_t channel;
+ if (cc_data[i].getChannel(&channel)) {
+ mCurrentChannel = channel;
+ }
+ if (mCurrentChannel == mFoundChannels[index]) {
+ memcpy(filteredCCBuf->data() + filteredCCBuf->size(),
+ (void *)&cc_data[i], sizeof(CCData));
+ filteredCCBuf->setRange(0, filteredCCBuf->size() + sizeof(CCData));
+ }
+ }
+
+ return filteredCCBuf;
+}
+
+void NuPlayer::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
+ if (extractFromSEI(accessUnit)) {
+ ALOGI("Found CEA-608 track");
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTrackAdded);
+ msg->post();
+ }
+ // TODO: extract CC from other sources
+}
+
+void NuPlayer::CCDecoder::display(int64_t timeUs) {
+ if (!isTrackValid(mSelectedTrack)) {
+ ALOGE("Could not find current track(index=%d)", mSelectedTrack);
+ return;
+ }
+
+ ssize_t index = mCCMap.indexOfKey(timeUs);
+ if (index < 0) {
+ ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
+ return;
+ }
+
+ sp<ABuffer> ccBuf = filterCCBuf(mCCMap.valueAt(index), mSelectedTrack);
+
+ if (ccBuf->size() > 0) {
+#if 0
+ dumpBytePair(ccBuf);
+#endif
+
+ ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
+ ccBuf->meta()->setInt64("timeUs", timeUs);
+ ccBuf->meta()->setInt64("durationUs", 0ll);
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatClosedCaptionData);
+ msg->setBuffer("buffer", ccBuf);
+ msg->post();
+ }
+
+ // remove this entry and all entries queued before it
+ mCCMap.removeItemsAt(0, index + 1);
+}
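display() looks up captions keyed by the exact frame timestamp and then prunes that entry, plus everything older, from mCCMap. The same bookkeeping expressed with std::map instead of KeyedVector, as a rough sketch (the caption payload type here is a placeholder):

    #include <cstdint>
    #include <iterator>
    #include <map>
    #include <string>
    #include <vector>

    using CCPayload = std::vector<std::string>;   // placeholder for the CC byte pairs

    // Return the captions stored for exactly 'timeUs' (if any) and drop every
    // entry at or before that timestamp, mirroring CCDecoder::display().
    static bool takeCaptionsAt(std::map<int64_t, CCPayload> *ccMap,
                               int64_t timeUs, CCPayload *out) {
        auto it = ccMap->find(timeUs);
        if (it == ccMap->end()) {
            return false;                         // nothing queued for this frame
        }
        *out = it->second;
        ccMap->erase(ccMap->begin(), std::next(it));  // prune entries <= timeUs
        return true;
    }

    int main() {
        std::map<int64_t, CCPayload> ccMap;
        ccMap[33000] = { "CC pair" };
        CCPayload out;
        if (takeCaptionsAt(&ccMap, 33000, &out)) {
            // out now holds the captions for this frame; ccMap has been pruned
        }
        return 0;
    }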
+
+void NuPlayer::CCDecoder::flush() {
+ mCCMap.clear();
+}
+
+} // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h
new file mode 100644
index 0000000..5e06f4e
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_CCDECODER_H_
+
+#define NUPLAYER_CCDECODER_H_
+
+#include "NuPlayer.h"
+
+namespace android {
+
+struct NuPlayer::CCDecoder : public RefBase {
+ enum {
+ kWhatClosedCaptionData,
+ kWhatTrackAdded,
+ };
+
+ CCDecoder(const sp<AMessage> &notify);
+
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
+ status_t selectTrack(size_t index, bool select);
+ bool isSelected() const;
+ void decode(const sp<ABuffer> &accessUnit);
+ void display(int64_t timeUs);
+ void flush();
+
+private:
+ sp<AMessage> mNotify;
+ KeyedVector<int64_t, sp<ABuffer> > mCCMap;
+ size_t mCurrentChannel;
+ int32_t mSelectedTrack;
+ int32_t mTrackIndices[4];
+ Vector<size_t> mFoundChannels;
+
+ bool isTrackValid(size_t index) const;
+ int32_t getTrackIndex(size_t channel) const;
+ bool extractFromSEI(const sp<ABuffer> &accessUnit);
+ sp<ABuffer> filterCCBuf(const sp<ABuffer> &ccBuf, size_t index);
+
+ DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+};
+
+} // namespace android
+
+#endif // NUPLAYER_CCDECODER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 27f6131..2abd9d6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,10 +19,12 @@
#include <utils/Log.h>
#include <inttypes.h>
+#include "NuPlayerCCDecoder.h"
#include "NuPlayerDecoder.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
#include <media/ICrypto.h>
-#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -31,71 +33,103 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
+#include "avc_utils.h"
+#include "ATSParser.h"
+
namespace android {
NuPlayer::Decoder::Decoder(
const sp<AMessage> &notify,
- const sp<NativeWindowWrapper> &nativeWindow)
- : mNotify(notify),
+ const sp<Source> &source,
+ const sp<Renderer> &renderer,
+ const sp<NativeWindowWrapper> &nativeWindow,
+ const sp<CCDecoder> &ccDecoder)
+ : DecoderBase(notify),
mNativeWindow(nativeWindow),
- mBufferGeneration(0),
+ mSource(source),
+ mRenderer(renderer),
+ mCCDecoder(ccDecoder),
+ mSkipRenderingUntilMediaTimeUs(-1ll),
+ mNumFramesTotal(0ll),
+ mNumFramesDropped(0ll),
+ mIsAudio(true),
+ mIsVideoAVC(false),
+ mIsSecure(false),
+ mFormatChangePending(false),
mPaused(true),
+ mResumePending(false),
mComponentName("decoder") {
- // Every decoder has its own looper because MediaCodec operations
- // are blocking, but NuPlayer needs asynchronous operations.
- mDecoderLooper = new ALooper;
- mDecoderLooper->setName("NPDecoder");
- mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
-
mCodecLooper = new ALooper;
mCodecLooper->setName("NPDecoder-CL");
mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}
NuPlayer::Decoder::~Decoder() {
- mDecoderLooper->unregisterHandler(id());
- mDecoderLooper->stop();
-
releaseAndResetMediaBuffers();
}
-static
-status_t PostAndAwaitResponse(
- const sp<AMessage> &msg, sp<AMessage> *response) {
- status_t err = msg->postAndAwaitResponse(response);
+void NuPlayer::Decoder::getStats(
+ int64_t *numFramesTotal,
+ int64_t *numFramesDropped) const {
+ *numFramesTotal = mNumFramesTotal;
+ *numFramesDropped = mNumFramesDropped;
+}
- if (err != OK) {
- return err;
- }
+void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+ ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
- if (!(*response)->findInt32("err", &err)) {
- err = OK;
- }
+ switch (msg->what()) {
+ case kWhatCodecNotify:
+ {
+ if (!isStaleReply(msg)) {
+ int32_t numInput, numOutput;
- return err;
-}
+ if (!msg->findInt32("input-buffers", &numInput)) {
+ numInput = INT32_MAX;
+ }
-void NuPlayer::Decoder::rememberCodecSpecificData(const sp<AMessage> &format) {
- mCSDsForCurrentFormat.clear();
- for (int32_t i = 0; ; ++i) {
- AString tag = "csd-";
- tag.append(i);
- sp<ABuffer> buffer;
- if (!format->findBuffer(tag.c_str(), &buffer)) {
+ if (!msg->findInt32("output-buffers", &numOutput)) {
+ numOutput = INT32_MAX;
+ }
+
+ if (!mPaused) {
+ while (numInput-- > 0 && handleAnInputBuffer()) {}
+ }
+
+ while (numOutput-- > 0 && handleAnOutputBuffer()) {}
+ }
+
+ requestCodecNotification();
break;
}
- mCSDsForCurrentFormat.push(buffer);
+
+ case kWhatRenderBuffer:
+ {
+ if (!isStaleReply(msg)) {
+ onRenderBuffer(msg);
+ }
+ break;
+ }
+
+ default:
+ DecoderBase::onMessageReceived(msg);
+ break;
}
}
void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
CHECK(mCodec == NULL);
+ mFormatChangePending = false;
+
++mBufferGeneration;
AString mime;
CHECK(format->findString("mime", &mime));
+ mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
+ mIsVideoAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
+
sp<Surface> surface = NULL;
if (mNativeWindow != NULL) {
surface = mNativeWindow->getSurfaceTextureClient();
@@ -123,6 +157,7 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
handleError(UNKNOWN_ERROR);
return;
}
+ mIsSecure = secure;
mCodec->getName(&mComponentName);
@@ -169,83 +204,141 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
mInputBuffers.size(),
mOutputBuffers.size());
- requestCodecNotification();
+ if (mRenderer != NULL) {
+ requestCodecNotification();
+ }
mPaused = false;
+ mResumePending = false;
}
-void NuPlayer::Decoder::releaseAndResetMediaBuffers() {
- for (size_t i = 0; i < mMediaBuffers.size(); i++) {
- if (mMediaBuffers[i] != NULL) {
- mMediaBuffers[i]->release();
- mMediaBuffers.editItemAt(i) = NULL;
- }
- }
- mMediaBuffers.resize(mInputBuffers.size());
- for (size_t i = 0; i < mMediaBuffers.size(); i++) {
- mMediaBuffers.editItemAt(i) = NULL;
+void NuPlayer::Decoder::onSetRenderer(const sp<Renderer> &renderer) {
+ bool hadNoRenderer = (mRenderer == NULL);
+ mRenderer = renderer;
+ if (hadNoRenderer && mRenderer != NULL) {
+ requestCodecNotification();
}
- mInputBufferIsDequeued.clear();
- mInputBufferIsDequeued.resize(mInputBuffers.size());
- for (size_t i = 0; i < mInputBufferIsDequeued.size(); i++) {
- mInputBufferIsDequeued.editItemAt(i) = false;
+}
+
+void NuPlayer::Decoder::onGetInputBuffers(
+ Vector<sp<ABuffer> > *dstBuffers) {
+ dstBuffers->clear();
+ for (size_t i = 0; i < mInputBuffers.size(); i++) {
+ dstBuffers->push(mInputBuffers[i]);
}
+}
- mPendingInputMessages.clear();
+void NuPlayer::Decoder::onResume(bool notifyComplete) {
+ mPaused = false;
+
+ if (notifyComplete) {
+ mResumePending = true;
+ }
}
-void NuPlayer::Decoder::requestCodecNotification() {
+void NuPlayer::Decoder::onFlush(bool notifyComplete) {
+ if (mCCDecoder != NULL) {
+ mCCDecoder->flush();
+ }
+
+ if (mRenderer != NULL) {
+ mRenderer->flush(mIsAudio, notifyComplete);
+ mRenderer->signalTimeDiscontinuity();
+ }
+
+ status_t err = OK;
if (mCodec != NULL) {
- sp<AMessage> reply = new AMessage(kWhatCodecNotify, id());
- reply->setInt32("generation", mBufferGeneration);
- mCodec->requestActivityNotification(reply);
+ err = mCodec->flush();
+ mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
+ ++mBufferGeneration;
}
-}
-bool NuPlayer::Decoder::isStaleReply(const sp<AMessage> &msg) {
- int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- return generation != mBufferGeneration;
-}
+ if (err != OK) {
+ ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ // finish with posting kWhatFlushCompleted.
+ // we attempt to release the buffers even if flush fails.
+ }
+ releaseAndResetMediaBuffers();
-void NuPlayer::Decoder::init() {
- mDecoderLooper->registerHandler(this);
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ mPaused = true;
+ }
}
-void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
- sp<AMessage> msg = new AMessage(kWhatConfigure, id());
- msg->setMessage("format", format);
- msg->post();
-}
+void NuPlayer::Decoder::onShutdown(bool notifyComplete) {
+ status_t err = OK;
-void NuPlayer::Decoder::signalUpdateFormat(const sp<AMessage> &format) {
- sp<AMessage> msg = new AMessage(kWhatUpdateFormat, id());
- msg->setMessage("format", format);
- msg->post();
-}
+ // if there is a pending resume request, notify complete now
+ notifyResumeCompleteIfNecessary();
+
+ if (mCodec != NULL) {
+ err = mCodec->release();
+ mCodec = NULL;
+ ++mBufferGeneration;
+
+ if (mNativeWindow != NULL) {
+ // reconnect to surface as MediaCodec disconnected from it
+ status_t error =
+ native_window_api_connect(
+ mNativeWindow->getNativeWindow().get(),
+ NATIVE_WINDOW_API_MEDIA);
+ ALOGW_IF(error != NO_ERROR,
+ "[%s] failed to connect to native window, error=%d",
+ mComponentName.c_str(), error);
+ }
+ mComponentName = "decoder";
+ }
-status_t NuPlayer::Decoder::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
- sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, id());
- msg->setPointer("buffers", buffers);
+ releaseAndResetMediaBuffers();
- sp<AMessage> response;
- return PostAndAwaitResponse(msg, &response);
+ if (err != OK) {
+ ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ // finish with posting kWhatShutdownCompleted.
+ }
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ mPaused = true;
+ }
}
-void NuPlayer::Decoder::handleError(int32_t err)
-{
- // We cannot immediately release the codec due to buffers still outstanding
- // in the renderer. We signal to the player the error so it can shutdown/release the
- // decoder after flushing and increment the generation to discard unnecessary messages.
+void NuPlayer::Decoder::doRequestBuffers() {
+ if (mFormatChangePending) {
+ return;
+ }
+ status_t err = OK;
+ while (!mDequeuedInputBuffers.empty()) {
+ size_t bufferIx = *mDequeuedInputBuffers.begin();
+ sp<AMessage> msg = new AMessage();
+ msg->setSize("buffer-ix", bufferIx);
+ err = fetchInputData(msg);
+ if (err != OK) {
+ break;
+ }
+ mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin());
- ++mBufferGeneration;
+ if (!mPendingInputMessages.empty()
+ || !onInputBufferFetched(msg)) {
+ mPendingInputMessages.push_back(msg);
+ }
+ }
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatError);
- notify->setInt32("err", err);
- notify->post();
+ if (err == -EWOULDBLOCK
+ && mSource->feedMoreTSData() == OK) {
+ scheduleRequestBuffers();
+ }
}
bool NuPlayer::Decoder::handleAnInputBuffer() {
+ if (mFormatChangePending) {
+ return false;
+ }
size_t bufferIx = -1;
status_t res = mCodec->dequeueInputBuffer(&bufferIx);
ALOGV("[%s] dequeued input: %d",
@@ -267,22 +360,21 @@ bool NuPlayer::Decoder::handleAnInputBuffer() {
}
mInputBufferIsDequeued.editItemAt(bufferIx) = true;
- sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
- reply->setSize("buffer-ix", bufferIx);
- reply->setInt32("generation", mBufferGeneration);
-
if (!mCSDsToSubmit.isEmpty()) {
+ sp<AMessage> msg = new AMessage();
+ msg->setSize("buffer-ix", bufferIx);
+
sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);
ALOGI("[%s] resubmitting CSD", mComponentName.c_str());
- reply->setBuffer("buffer", buffer);
+ msg->setBuffer("buffer", buffer);
mCSDsToSubmit.removeAt(0);
- CHECK(onInputBufferFilled(reply));
+ CHECK(onInputBufferFetched(msg));
return true;
}
while (!mPendingInputMessages.empty()) {
sp<AMessage> msg = *mPendingInputMessages.begin();
- if (!onInputBufferFilled(msg)) {
+ if (!onInputBufferFetched(msg)) {
break;
}
mPendingInputMessages.erase(mPendingInputMessages.begin());
@@ -292,15 +384,273 @@ bool NuPlayer::Decoder::handleAnInputBuffer() {
return true;
}
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatFillThisBuffer);
- notify->setBuffer("buffer", mInputBuffers[bufferIx]);
- notify->setMessage("reply", reply);
- notify->post();
+ mDequeuedInputBuffers.push_back(bufferIx);
+
+ onRequestInputBuffers();
return true;
}
-bool android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
+bool NuPlayer::Decoder::handleAnOutputBuffer() {
+ if (mFormatChangePending) {
+ return false;
+ }
+ size_t bufferIx = -1;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ status_t res = mCodec->dequeueOutputBuffer(
+ &bufferIx, &offset, &size, &timeUs, &flags);
+
+ if (res != OK) {
+ ALOGV("[%s] dequeued output: %d", mComponentName.c_str(), res);
+ } else {
+ ALOGV("[%s] dequeued output: %d (time=%lld flags=%" PRIu32 ")",
+ mComponentName.c_str(), (int)bufferIx, timeUs, flags);
+ }
+
+ if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
+ res = mCodec->getOutputBuffers(&mOutputBuffers);
+ if (res != OK) {
+ ALOGE("Failed to get output buffers for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+ // NuPlayer ignores this
+ return true;
+ } else if (res == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format = new AMessage();
+ res = mCodec->getOutputFormat(&format);
+ if (res != OK) {
+ ALOGE("Failed to get output format for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+
+ if (!mIsAudio) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatVideoSizeChanged);
+ notify->setMessage("format", format);
+ notify->post();
+ } else if (mRenderer != NULL) {
+ uint32_t flags;
+ int64_t durationUs;
+ bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+ if (!hasVideo &&
+ mSource->getDuration(&durationUs) == OK &&
+ durationUs
+ > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+ flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ } else {
+ flags = AUDIO_OUTPUT_FLAG_NONE;
+ }
+
+ res = mRenderer->openAudioSink(
+ format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaded */);
+ if (res != OK) {
+ ALOGE("Failed to open AudioSink on format change for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+ }
+ return true;
+ } else if (res == INFO_DISCONTINUITY) {
+ // nothing to do
+ return true;
+ } else if (res != OK) {
+ if (res != -EAGAIN) {
+ ALOGE("Failed to dequeue output buffer for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ }
+ return false;
+ }
+
+ CHECK_LT(bufferIx, mOutputBuffers.size());
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->setRange(offset, size);
+ buffer->meta()->clear();
+ buffer->meta()->setInt64("timeUs", timeUs);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ buffer->meta()->setInt32("eos", true);
+ notifyResumeCompleteIfNecessary();
+ }
+ // we do not expect CODECCONFIG or SYNCFRAME for decoder
+
+ sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id());
+ reply->setSize("buffer-ix", bufferIx);
+ reply->setInt32("generation", mBufferGeneration);
+
+ if (mSkipRenderingUntilMediaTimeUs >= 0) {
+ if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+ ALOGV("[%s] dropping buffer at time %lld as requested.",
+ mComponentName.c_str(), (long long)timeUs);
+
+ reply->post();
+ return true;
+ }
+
+ mSkipRenderingUntilMediaTimeUs = -1;
+ }
+
+ // wait until 1st frame comes out to signal resume complete
+ notifyResumeCompleteIfNecessary();
+
+ if (mRenderer != NULL) {
+ // send the buffer to renderer.
+ mRenderer->queueBuffer(mIsAudio, buffer, reply);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+ }
+ }
+
+ return true;
+}
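In the INFO_FORMAT_CHANGED branch above, the audio sink is reopened with the deep-buffer output flag only for long, audio-only content. That flag selection in isolation, as a sketch; the threshold constant stands in for AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US and its value here is an assumption:

    #include <cstdint>
    #include <cstdio>

    enum AudioOutputFlags { FLAG_NONE = 0, FLAG_DEEP_BUFFER = 1 };

    // Stand-in for AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US (value assumed here).
    static const int64_t kMinDeepBufferDurationUs = 5000000ll;

    // Deep-buffer output is only worth it for long, audio-only playback; video
    // or unknown duration falls back to the regular (lower-latency) output path.
    static AudioOutputFlags pickAudioSinkFlags(bool hasVideo, bool durationKnown,
                                               int64_t durationUs) {
        if (!hasVideo && durationKnown && durationUs > kMinDeepBufferDurationUs) {
            return FLAG_DEEP_BUFFER;
        }
        return FLAG_NONE;
    }

    int main() {
        std::printf("music track: %d\n", pickAudioSinkFlags(false, true, 180000000ll));
        std::printf("movie: %d\n", pickAudioSinkFlags(true, true, 180000000ll));
        return 0;
    }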
+
+void NuPlayer::Decoder::releaseAndResetMediaBuffers() {
+ for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+ if (mMediaBuffers[i] != NULL) {
+ mMediaBuffers[i]->release();
+ mMediaBuffers.editItemAt(i) = NULL;
+ }
+ }
+ mMediaBuffers.resize(mInputBuffers.size());
+ for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+ mMediaBuffers.editItemAt(i) = NULL;
+ }
+ mInputBufferIsDequeued.clear();
+ mInputBufferIsDequeued.resize(mInputBuffers.size());
+ for (size_t i = 0; i < mInputBufferIsDequeued.size(); i++) {
+ mInputBufferIsDequeued.editItemAt(i) = false;
+ }
+
+ mPendingInputMessages.clear();
+ mDequeuedInputBuffers.clear();
+ mSkipRenderingUntilMediaTimeUs = -1;
+}
+
+void NuPlayer::Decoder::requestCodecNotification() {
+ if (mFormatChangePending) {
+ return;
+ }
+ if (mCodec != NULL) {
+ sp<AMessage> reply = new AMessage(kWhatCodecNotify, id());
+ reply->setInt32("generation", mBufferGeneration);
+ mCodec->requestActivityNotification(reply);
+ }
+}
+
+bool NuPlayer::Decoder::isStaleReply(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ return generation != mBufferGeneration;
+}
+
+status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) {
+ sp<ABuffer> accessUnit;
+ bool dropAccessUnit;
+ do {
+ status_t err = mSource->dequeueAccessUnit(mIsAudio, &accessUnit);
+
+ if (err == -EWOULDBLOCK) {
+ return err;
+ } else if (err != OK) {
+ if (err == INFO_DISCONTINUITY) {
+ int32_t type;
+ CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+ bool formatChange =
+ (mIsAudio &&
+ (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
+ || (!mIsAudio &&
+ (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));
+
+ bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+ ALOGI("%s discontinuity (format=%d, time=%d)",
+ mIsAudio ? "audio" : "video", formatChange, timeChange);
+
+ bool seamlessFormatChange = false;
+ sp<AMessage> newFormat = mSource->getFormat(mIsAudio);
+ if (formatChange) {
+ seamlessFormatChange =
+ supportsSeamlessFormatChange(newFormat);
+ // treat seamless format change separately
+ formatChange = !seamlessFormatChange;
+ }
+
+ if (formatChange || timeChange) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatInputDiscontinuity);
+ msg->setInt32("formatChange", formatChange);
+ msg->post();
+ }
+
+ if (formatChange /* not seamless */) {
+ // must change decoder
+ // return EOS and wait to be killed
+ mFormatChangePending = true;
+ return ERROR_END_OF_STREAM;
+ } else if (timeChange) {
+ // need to flush
+ // TODO: Ideally we shouldn't need a flush upon a time
+ // discontinuity, since flushing will cause loss of frames. We
+ // should probably queue a time-change marker to the output
+ // queue and handle it in the renderer instead.
+ rememberCodecSpecificData(newFormat);
+ onFlush(false /* notifyComplete */);
+ err = OK;
+ } else if (seamlessFormatChange) {
+ // reuse existing decoder and don't flush
+ rememberCodecSpecificData(newFormat);
+ err = OK;
+ } else {
+ // This stream is unaffected by the discontinuity
+ return -EWOULDBLOCK;
+ }
+ }
+
+ reply->setInt32("err", err);
+ return OK;
+ }
+
+ if (!mIsAudio) {
+ ++mNumFramesTotal;
+ }
+
+ dropAccessUnit = false;
+ if (!mIsAudio
+ && !mIsSecure
+ && mRenderer->getVideoLateByUs() > 100000ll
+ && mIsVideoAVC
+ && !IsAVCReferenceFrame(accessUnit)) {
+ dropAccessUnit = true;
+ ++mNumFramesDropped;
+ }
+ } while (dropAccessUnit);
+
+ // ALOGV("returned a valid buffer of %s data", mIsAudio ? "mIsAudio" : "video");
+#if 0
+ int64_t mediaTimeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("feeding %s input buffer at media time %.2f secs",
+ mIsAudio ? "audio" : "video",
+ mediaTimeUs / 1E6);
+#endif
+
+ if (mCCDecoder != NULL) {
+ mCCDecoder->decode(accessUnit);
+ }
+
+ reply->setBuffer("buffer", accessUnit);
+
+ return OK;
+}
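fetchInputData() reduces a discontinuity to one of four outcomes: recreate the decoder on a hard format change, flush on a time change, keep the codec alive for a seamless format change, or leave the stream untouched. The decision order in isolation, with assumed enum and function names:

    #include <cstdio>

    // Outcome of a discontinuity for one stream; stands in for the branches in
    // NuPlayer::Decoder::fetchInputData() (names here are illustrative).
    enum DiscontinuityAction {
        ACTION_NONE,             // stream unaffected, keep feeding
        ACTION_FLUSH,            // time change: flush the codec, keep it alive
        ACTION_SEAMLESS,         // format change the codec can absorb in place
        ACTION_RECREATE_DECODER, // hard format change: shut down and restart
    };

    static DiscontinuityAction classify(bool formatChange, bool timeChange,
                                        bool seamlessSupported) {
        if (formatChange && !seamlessSupported) return ACTION_RECREATE_DECODER;
        if (timeChange) return ACTION_FLUSH;
        if (formatChange) return ACTION_SEAMLESS;  // supported seamless change
        return ACTION_NONE;
    }

    int main() {
        std::printf("%d\n", classify(true,  false, false)); // recreate decoder
        std::printf("%d\n", classify(false, true,  false)); // flush only
        std::printf("%d\n", classify(true,  false, true));  // seamless format change
        return 0;
    }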
+
+bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
size_t bufferIx;
CHECK(msg->findSize("buffer-ix", &bufferIx));
CHECK_LT(bufferIx, mInputBuffers.size());
@@ -342,8 +692,6 @@ bool android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
}
}
-
-
if (buffer == NULL /* includes !hasBuffer */) {
int32_t streamErr = ERROR_END_OF_STREAM;
CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
@@ -375,6 +723,17 @@ bool android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
handleError(streamErr);
}
} else {
+ sp<AMessage> extra;
+ if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+ int64_t resumeAtMediaTimeUs;
+ if (extra->findInt64(
+ "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
+ ALOGI("[%s] suppressing rendering until %lld us",
+ mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+ mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+ }
+ }
+
int64_t timeUs = 0;
uint32_t flags = 0;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
@@ -418,96 +777,22 @@ bool android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
return true;
}
-bool NuPlayer::Decoder::handleAnOutputBuffer() {
- size_t bufferIx = -1;
- size_t offset;
- size_t size;
- int64_t timeUs;
- uint32_t flags;
- status_t res = mCodec->dequeueOutputBuffer(
- &bufferIx, &offset, &size, &timeUs, &flags);
-
- if (res != OK) {
- ALOGV("[%s] dequeued output: %d", mComponentName.c_str(), res);
- } else {
- ALOGV("[%s] dequeued output: %d (time=%lld flags=%" PRIu32 ")",
- mComponentName.c_str(), (int)bufferIx, timeUs, flags);
- }
-
- if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
- res = mCodec->getOutputBuffers(&mOutputBuffers);
- if (res != OK) {
- ALOGE("Failed to get output buffers for %s after INFO event (err=%d)",
- mComponentName.c_str(), res);
- handleError(res);
- return false;
- }
- // NuPlayer ignores this
- return true;
- } else if (res == INFO_FORMAT_CHANGED) {
- sp<AMessage> format = new AMessage();
- res = mCodec->getOutputFormat(&format);
- if (res != OK) {
- ALOGE("Failed to get output format for %s after INFO event (err=%d)",
- mComponentName.c_str(), res);
- handleError(res);
- return false;
- }
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
- notify->setMessage("format", format);
- notify->post();
- return true;
- } else if (res == INFO_DISCONTINUITY) {
- // nothing to do
- return true;
- } else if (res != OK) {
- if (res != -EAGAIN) {
- ALOGE("Failed to dequeue output buffer for %s (err=%d)",
- mComponentName.c_str(), res);
- handleError(res);
- }
- return false;
- }
-
- CHECK_LT(bufferIx, mOutputBuffers.size());
- sp<ABuffer> buffer = mOutputBuffers[bufferIx];
- buffer->setRange(offset, size);
- buffer->meta()->clear();
- buffer->meta()->setInt64("timeUs", timeUs);
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- buffer->meta()->setInt32("eos", true);
- }
- // we do not expect CODECCONFIG or SYNCFRAME for decoder
-
- sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id());
- reply->setSize("buffer-ix", bufferIx);
- reply->setInt32("generation", mBufferGeneration);
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatDrainThisBuffer);
- notify->setBuffer("buffer", buffer);
- notify->setMessage("reply", reply);
- notify->post();
-
- // FIXME: This should be handled after rendering is complete,
- // but Renderer needs it now
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- ALOGV("queueing eos [%s]", mComponentName.c_str());
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatEOS);
- notify->setInt32("err", ERROR_END_OF_STREAM);
- notify->post();
- }
- return true;
-}
-
void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
status_t err;
int32_t render;
size_t bufferIx;
CHECK(msg->findSize("buffer-ix", &bufferIx));
+
+ if (!mIsAudio) {
+ int64_t timeUs;
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->meta()->findInt64("timeUs", &timeUs);
+
+ if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
+ mCCDecoder->display(timeUs);
+ }
+ }
+
if (msg->findInt32("render", &render) && render) {
int64_t timestampNs;
CHECK(msg->findInt64("timestampNs", &timestampNs));
@@ -522,192 +807,8 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
}
}
-void NuPlayer::Decoder::onFlush() {
- status_t err = OK;
- if (mCodec != NULL) {
- err = mCodec->flush();
- mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
- ++mBufferGeneration;
- }
-
- if (err != OK) {
- ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
- handleError(err);
- // finish with posting kWhatFlushCompleted.
- // we attempt to release the buffers even if flush fails.
- }
- releaseAndResetMediaBuffers();
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatFlushCompleted);
- notify->post();
- mPaused = true;
-}
-
-void NuPlayer::Decoder::onResume() {
- mPaused = false;
-}
-
-void NuPlayer::Decoder::onShutdown() {
- status_t err = OK;
- if (mCodec != NULL) {
- err = mCodec->release();
- mCodec = NULL;
- ++mBufferGeneration;
-
- if (mNativeWindow != NULL) {
- // reconnect to surface as MediaCodec disconnected from it
- status_t error =
- native_window_api_connect(
- mNativeWindow->getNativeWindow().get(),
- NATIVE_WINDOW_API_MEDIA);
- ALOGW_IF(error != NO_ERROR,
- "[%s] failed to connect to native window, error=%d",
- mComponentName.c_str(), error);
- }
- mComponentName = "decoder";
- }
-
- releaseAndResetMediaBuffers();
-
- if (err != OK) {
- ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
- handleError(err);
- // finish with posting kWhatShutdownCompleted.
- }
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatShutdownCompleted);
- notify->post();
- mPaused = true;
-}
-
-void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
- ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
-
- switch (msg->what()) {
- case kWhatConfigure:
- {
- sp<AMessage> format;
- CHECK(msg->findMessage("format", &format));
- onConfigure(format);
- break;
- }
-
- case kWhatUpdateFormat:
- {
- sp<AMessage> format;
- CHECK(msg->findMessage("format", &format));
- rememberCodecSpecificData(format);
- break;
- }
-
- case kWhatGetInputBuffers:
- {
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
- Vector<sp<ABuffer> > *dstBuffers;
- CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
-
- dstBuffers->clear();
- for (size_t i = 0; i < mInputBuffers.size(); i++) {
- dstBuffers->push(mInputBuffers[i]);
- }
-
- (new AMessage)->postReply(replyID);
- break;
- }
-
- case kWhatCodecNotify:
- {
- if (!isStaleReply(msg)) {
- int32_t numInput, numOutput;
-
- if (!msg->findInt32("input-buffers", &numInput)) {
- numInput = INT32_MAX;
- }
-
- if (!msg->findInt32("output-buffers", &numOutput)) {
- numOutput = INT32_MAX;
- }
-
- if (!mPaused) {
- while (numInput-- > 0 && handleAnInputBuffer()) {}
- }
-
- while (numOutput-- > 0 && handleAnOutputBuffer()) {}
- }
-
- requestCodecNotification();
- break;
- }
-
- case kWhatInputBufferFilled:
- {
- if (!isStaleReply(msg)) {
- if (!mPendingInputMessages.empty()
- || !onInputBufferFilled(msg)) {
- mPendingInputMessages.push_back(msg);
- }
- }
-
- break;
- }
-
- case kWhatRenderBuffer:
- {
- if (!isStaleReply(msg)) {
- onRenderBuffer(msg);
- }
- break;
- }
-
- case kWhatFlush:
- {
- sp<AMessage> format;
- if (msg->findMessage("new-format", &format)) {
- rememberCodecSpecificData(format);
- }
- onFlush();
- break;
- }
-
- case kWhatResume:
- {
- onResume();
- break;
- }
-
- case kWhatShutdown:
- {
- onShutdown();
- break;
- }
-
- default:
- TRESPASS();
- break;
- }
-}
-
-void NuPlayer::Decoder::signalFlush(const sp<AMessage> &format) {
- sp<AMessage> msg = new AMessage(kWhatFlush, id());
- if (format != NULL) {
- msg->setMessage("new-format", format);
- }
- msg->post();
-}
-
-void NuPlayer::Decoder::signalResume() {
- (new AMessage(kWhatResume, id()))->post();
-}
-
-void NuPlayer::Decoder::initiateShutdown() {
- (new AMessage(kWhatShutdown, id()))->post();
-}
-
-bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const {
+bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(
+ const sp<AMessage> &targetFormat) const {
if (targetFormat == NULL) {
return true;
}
@@ -772,332 +873,30 @@ bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetF
return seamless;
}
-struct CCData {
- CCData(uint8_t type, uint8_t data1, uint8_t data2)
- : mType(type), mData1(data1), mData2(data2) {
- }
- bool getChannel(size_t *channel) const {
- if (mData1 >= 0x10 && mData1 <= 0x1f) {
- *channel = (mData1 >= 0x18 ? 1 : 0) + (mType ? 2 : 0);
- return true;
- }
- return false;
- }
-
- uint8_t mType;
- uint8_t mData1;
- uint8_t mData2;
-};
-
-static bool isNullPad(CCData *cc) {
- return cc->mData1 < 0x10 && cc->mData2 < 0x10;
-}
-
-static void dumpBytePair(const sp<ABuffer> &ccBuf) {
- size_t offset = 0;
- AString out;
-
- while (offset < ccBuf->size()) {
- char tmp[128];
-
- CCData *cc = (CCData *) (ccBuf->data() + offset);
-
- if (isNullPad(cc)) {
- // 1 null pad or XDS metadata, ignore
- offset += sizeof(CCData);
- continue;
- }
-
- if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
- // 2 basic chars
- sprintf(tmp, "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
- } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
- && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
- // 1 special char
- sprintf(tmp, "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
- && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
- // 1 Spanish/French char
- sprintf(tmp, "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
- && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
- // 1 Portuguese/German/Danish char
- sprintf(tmp, "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
- && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
- // Mid-Row Codes (Table 69)
- sprintf(tmp, "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
- && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
- ||
- ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
- && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
- // Misc Control Codes (Table 70)
- sprintf(tmp, "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- } else if ((cc->mData1 & 0x70) == 0x10
- && (cc->mData2 & 0x40) == 0x40
- && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
- // Preamble Address Codes (Table 71)
- sprintf(tmp, "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- } else {
- sprintf(tmp, "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
- }
-
- if (out.size() > 0) {
- out.append(", ");
- }
-
- out.append(tmp);
-
- offset += sizeof(CCData);
- }
-
- ALOGI("%s", out.c_str());
-}
-
-NuPlayer::CCDecoder::CCDecoder(const sp<AMessage> &notify)
- : mNotify(notify),
- mCurrentChannel(0),
- mSelectedTrack(-1) {
- for (size_t i = 0; i < sizeof(mTrackIndices)/sizeof(mTrackIndices[0]); ++i) {
- mTrackIndices[i] = -1;
- }
-}
-
-size_t NuPlayer::CCDecoder::getTrackCount() const {
- return mFoundChannels.size();
-}
-
-sp<AMessage> NuPlayer::CCDecoder::getTrackInfo(size_t index) const {
- if (!isTrackValid(index)) {
- return NULL;
- }
-
- sp<AMessage> format = new AMessage();
-
- format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
- format->setString("language", "und");
- format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
- //CC1, field 0 channel 0
- bool isDefaultAuto = (mFoundChannels[index] == 0);
- format->setInt32("auto", isDefaultAuto);
- format->setInt32("default", isDefaultAuto);
- format->setInt32("forced", 0);
-
- return format;
-}
-
-status_t NuPlayer::CCDecoder::selectTrack(size_t index, bool select) {
- if (!isTrackValid(index)) {
- return BAD_VALUE;
- }
-
- if (select) {
- if (mSelectedTrack == (ssize_t)index) {
- ALOGE("track %zu already selected", index);
- return BAD_VALUE;
- }
- ALOGV("selected track %zu", index);
- mSelectedTrack = index;
- } else {
- if (mSelectedTrack != (ssize_t)index) {
- ALOGE("track %zu is not selected", index);
- return BAD_VALUE;
- }
- ALOGV("unselected track %zu", index);
- mSelectedTrack = -1;
- }
-
- return OK;
-}
-
-bool NuPlayer::CCDecoder::isSelected() const {
- return mSelectedTrack >= 0 && mSelectedTrack < (int32_t) getTrackCount();
-}
-
-bool NuPlayer::CCDecoder::isTrackValid(size_t index) const {
- return index < getTrackCount();
-}
-
-int32_t NuPlayer::CCDecoder::getTrackIndex(size_t channel) const {
- if (channel < sizeof(mTrackIndices)/sizeof(mTrackIndices[0])) {
- return mTrackIndices[channel];
- }
- return -1;
-}
-
-// returns true if a new CC track is found
-bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
- int64_t timeUs;
- CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
- sp<ABuffer> sei;
- if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
- return false;
- }
-
- bool trackAdded = false;
-
- NALBitReader br(sei->data() + 1, sei->size() - 1);
- // sei_message()
- while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()
- uint32_t payload_type = 0;
- size_t payload_size = 0;
- uint8_t last_byte;
-
- do {
- last_byte = br.getBits(8);
- payload_type += last_byte;
- } while (last_byte == 0xFF);
-
- do {
- last_byte = br.getBits(8);
- payload_size += last_byte;
- } while (last_byte == 0xFF);
-
- // sei_payload()
- if (payload_type == 4) {
- // user_data_registered_itu_t_t35()
-
- // ATSC A/72: 6.4.2
- uint8_t itu_t_t35_country_code = br.getBits(8);
- uint16_t itu_t_t35_provider_code = br.getBits(16);
- uint32_t user_identifier = br.getBits(32);
- uint8_t user_data_type_code = br.getBits(8);
-
- payload_size -= 1 + 2 + 4 + 1;
-
- if (itu_t_t35_country_code == 0xB5
- && itu_t_t35_provider_code == 0x0031
- && user_identifier == 'GA94'
- && user_data_type_code == 0x3) {
- // MPEG_cc_data()
- // ATSC A/53 Part 4: 6.2.3.1
- br.skipBits(1); //process_em_data_flag
- bool process_cc_data_flag = br.getBits(1);
- br.skipBits(1); //additional_data_flag
- size_t cc_count = br.getBits(5);
- br.skipBits(8); // em_data;
- payload_size -= 2;
-
- if (process_cc_data_flag) {
- AString out;
-
- sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData));
- ccBuf->setRange(0, 0);
-
- for (size_t i = 0; i < cc_count; i++) {
- uint8_t marker = br.getBits(5);
- CHECK_EQ(marker, 0x1f);
-
- bool cc_valid = br.getBits(1);
- uint8_t cc_type = br.getBits(2);
- // remove odd parity bit
- uint8_t cc_data_1 = br.getBits(8) & 0x7f;
- uint8_t cc_data_2 = br.getBits(8) & 0x7f;
-
- if (cc_valid
- && (cc_type == 0 || cc_type == 1)) {
- CCData cc(cc_type, cc_data_1, cc_data_2);
- if (!isNullPad(&cc)) {
- size_t channel;
- if (cc.getChannel(&channel) && getTrackIndex(channel) < 0) {
- mTrackIndices[channel] = mFoundChannels.size();
- mFoundChannels.push_back(channel);
- trackAdded = true;
- }
- memcpy(ccBuf->data() + ccBuf->size(),
- (void *)&cc, sizeof(cc));
- ccBuf->setRange(0, ccBuf->size() + sizeof(CCData));
- }
- }
- }
- payload_size -= cc_count * 3;
-
- mCCMap.add(timeUs, ccBuf);
- break;
- }
- } else {
- ALOGV("Malformed SEI payload type 4");
- }
- } else {
- ALOGV("Unsupported SEI payload type %d", payload_type);
- }
-
- // skipping remaining bits of this payload
- br.skipBits(payload_size * 8);
+void NuPlayer::Decoder::rememberCodecSpecificData(const sp<AMessage> &format) {
+ if (format == NULL) {
+ return;
}
-
- return trackAdded;
-}
-
-sp<ABuffer> NuPlayer::CCDecoder::filterCCBuf(
- const sp<ABuffer> &ccBuf, size_t index) {
- sp<ABuffer> filteredCCBuf = new ABuffer(ccBuf->size());
- filteredCCBuf->setRange(0, 0);
-
- size_t cc_count = ccBuf->size() / sizeof(CCData);
- const CCData* cc_data = (const CCData*)ccBuf->data();
- for (size_t i = 0; i < cc_count; ++i) {
- size_t channel;
- if (cc_data[i].getChannel(&channel)) {
- mCurrentChannel = channel;
- }
- if (mCurrentChannel == mFoundChannels[index]) {
- memcpy(filteredCCBuf->data() + filteredCCBuf->size(),
- (void *)&cc_data[i], sizeof(CCData));
- filteredCCBuf->setRange(0, filteredCCBuf->size() + sizeof(CCData));
+ mCSDsForCurrentFormat.clear();
+ for (int32_t i = 0; ; ++i) {
+ AString tag = "csd-";
+ tag.append(i);
+ sp<ABuffer> buffer;
+ if (!format->findBuffer(tag.c_str(), &buffer)) {
+ break;
}
+ mCSDsForCurrentFormat.push(buffer);
}
-
- return filteredCCBuf;
-}
-
-void NuPlayer::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
- if (extractFromSEI(accessUnit)) {
- ALOGI("Found CEA-608 track");
- sp<AMessage> msg = mNotify->dup();
- msg->setInt32("what", kWhatTrackAdded);
- msg->post();
- }
- // TODO: extract CC from other sources
}
-void NuPlayer::CCDecoder::display(int64_t timeUs) {
- if (!isTrackValid(mSelectedTrack)) {
- ALOGE("Could not find current track(index=%d)", mSelectedTrack);
- return;
- }
-
- ssize_t index = mCCMap.indexOfKey(timeUs);
- if (index < 0) {
- ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
- return;
- }
-
- sp<ABuffer> ccBuf = filterCCBuf(mCCMap.valueAt(index), mSelectedTrack);
+void NuPlayer::Decoder::notifyResumeCompleteIfNecessary() {
+ if (mResumePending) {
+ mResumePending = false;
- if (ccBuf->size() > 0) {
-#if 0
- dumpBytePair(ccBuf);
-#endif
-
- ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
- ccBuf->meta()->setInt64("timeUs", timeUs);
- ccBuf->meta()->setInt64("durationUs", 0ll);
-
- sp<AMessage> msg = mNotify->dup();
- msg->setInt32("what", kWhatClosedCaptionData);
- msg->setBuffer("buffer", ccBuf);
- msg->post();
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatResumeCompleted);
+ notify->post();
}
-
- // remove all entries before timeUs
- mCCMap.removeItemsAt(0, index + 1);
-}
-
-void NuPlayer::CCDecoder::flush() {
- mCCMap.clear();
}
} // namespace android
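
The csd-0/csd-1/... loop in rememberCodecSpecificData() above walks the codec-specific-data buffers the extractor attaches to the format message and stops at the first missing tag. As a rough illustration of how such saved buffers are later fed back to MediaCodec after a format change (queueInputBuffer() and BUFFER_FLAG_CODECCONFIG are standard MediaCodec API; the surrounding variable names are placeholders, not code from this patch):

// Sketch: submit one saved csd-N buffer through a dequeued input buffer
// before any regular access units. "codec", "inputBuffers", "bufferIndex"
// and "csdsToSubmit" are assumed to follow this decoder's conventions.
if (!csdsToSubmit.isEmpty()) {
    const sp<ABuffer> &csd = *csdsToSubmit.begin();
    sp<ABuffer> codecBuffer = inputBuffers.itemAt(bufferIndex);
    CHECK_LE(csd->size(), codecBuffer->capacity());
    memcpy(codecBuffer->data(), csd->data(), csd->size());

    status_t err = codec->queueInputBuffer(
            bufferIndex,
            0 /* offset */,
            csd->size(),
            0ll /* timeUs */,
            MediaCodec::BUFFER_FLAG_CODECCONFIG);
    if (err == OK) {
        csdsToSubmit.erase(csdsToSubmit.begin());
    }
}
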
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index dba3eee..1bfa94f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,70 +15,54 @@
*/
#ifndef NUPLAYER_DECODER_H_
-
#define NUPLAYER_DECODER_H_
#include "NuPlayer.h"
-#include <media/stagefright/foundation/AHandler.h>
+#include "NuPlayerDecoderBase.h"
namespace android {
-struct ABuffer;
-struct MediaCodec;
-struct MediaBuffer;
-
-struct NuPlayer::Decoder : public AHandler {
+struct NuPlayer::Decoder : public DecoderBase {
Decoder(const sp<AMessage> &notify,
- const sp<NativeWindowWrapper> &nativeWindow = NULL);
-
- virtual void configure(const sp<AMessage> &format);
- virtual void init();
-
- status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
- virtual void signalFlush(const sp<AMessage> &format = NULL);
- virtual void signalUpdateFormat(const sp<AMessage> &format);
- virtual void signalResume();
- virtual void initiateShutdown();
+ const sp<Source> &source,
+ const sp<Renderer> &renderer = NULL,
+ const sp<NativeWindowWrapper> &nativeWindow = NULL,
+ const sp<CCDecoder> &ccDecoder = NULL);
- virtual bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
-
- enum {
- kWhatFillThisBuffer = 'flTB',
- kWhatDrainThisBuffer = 'drTB',
- kWhatOutputFormatChanged = 'fmtC',
- kWhatFlushCompleted = 'flsC',
- kWhatShutdownCompleted = 'shDC',
- kWhatEOS = 'eos ',
- kWhatError = 'err ',
- };
+ virtual void getStats(
+ int64_t *mNumFramesTotal,
+ int64_t *mNumFramesDropped) const;
protected:
-
virtual ~Decoder();
virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual void onConfigure(const sp<AMessage> &format);
+ virtual void onSetRenderer(const sp<Renderer> &renderer);
+ virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);
+ virtual void onResume(bool notifyComplete);
+ virtual void onFlush(bool notifyComplete);
+ virtual void onShutdown(bool notifyComplete);
+ virtual void doRequestBuffers();
+
private:
enum {
- kWhatCodecNotify = 'cdcN',
- kWhatConfigure = 'conf',
- kWhatGetInputBuffers = 'gInB',
- kWhatInputBufferFilled = 'inpF',
- kWhatRenderBuffer = 'rndr',
- kWhatFlush = 'flus',
- kWhatShutdown = 'shuD',
- kWhatUpdateFormat = 'uFmt',
+ kWhatCodecNotify = 'cdcN',
+ kWhatRenderBuffer = 'rndr',
};
- sp<AMessage> mNotify;
sp<NativeWindowWrapper> mNativeWindow;
+ sp<Source> mSource;
+ sp<Renderer> mRenderer;
+ sp<CCDecoder> mCCDecoder;
+
sp<AMessage> mInputFormat;
sp<AMessage> mOutputFormat;
sp<MediaCodec> mCodec;
sp<ALooper> mCodecLooper;
- sp<ALooper> mDecoderLooper;
List<sp<AMessage> > mPendingInputMessages;
@@ -88,8 +72,20 @@ private:
Vector<sp<ABuffer> > mCSDsToSubmit;
Vector<bool> mInputBufferIsDequeued;
Vector<MediaBuffer *> mMediaBuffers;
+ Vector<size_t> mDequeuedInputBuffers;
+
+ int64_t mSkipRenderingUntilMediaTimeUs;
+ int64_t mNumFramesTotal;
+ int64_t mNumFramesDropped;
+ bool mIsAudio;
+ bool mIsVideoAVC;
+ bool mIsSecure;
+ bool mFormatChangePending;
+
+ bool mPaused;
+ bool mResumePending;
+ AString mComponentName;
- void handleError(int32_t err);
bool handleAnInputBuffer();
bool handleAnOutputBuffer();
@@ -97,53 +93,17 @@ private:
void requestCodecNotification();
bool isStaleReply(const sp<AMessage> &msg);
- void onConfigure(const sp<AMessage> &format);
- void onFlush();
- void onResume();
- bool onInputBufferFilled(const sp<AMessage> &msg);
+ status_t fetchInputData(sp<AMessage> &reply);
+ bool onInputBufferFetched(const sp<AMessage> &msg);
void onRenderBuffer(const sp<AMessage> &msg);
- void onShutdown();
-
- int32_t mBufferGeneration;
- bool mPaused;
- AString mComponentName;
+ bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
void rememberCodecSpecificData(const sp<AMessage> &format);
- DISALLOW_EVIL_CONSTRUCTORS(Decoder);
-};
+ void notifyResumeCompleteIfNecessary();
-struct NuPlayer::CCDecoder : public RefBase {
- enum {
- kWhatClosedCaptionData,
- kWhatTrackAdded,
- };
-
- CCDecoder(const sp<AMessage> &notify);
-
- size_t getTrackCount() const;
- sp<AMessage> getTrackInfo(size_t index) const;
- status_t selectTrack(size_t index, bool select);
- bool isSelected() const;
- void decode(const sp<ABuffer> &accessUnit);
- void display(int64_t timeUs);
- void flush();
-
-private:
- sp<AMessage> mNotify;
- KeyedVector<int64_t, sp<ABuffer> > mCCMap;
- size_t mCurrentChannel;
- int32_t mSelectedTrack;
- int32_t mTrackIndices[4];
- Vector<size_t> mFoundChannels;
-
- bool isTrackValid(size_t index) const;
- int32_t getTrackIndex(size_t channel) const;
- bool extractFromSEI(const sp<ABuffer> &accessUnit);
- sp<ABuffer> filterCCBuf(const sp<ABuffer> &ccBuf, size_t index);
-
- DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+ DISALLOW_EVIL_CONSTRUCTORS(Decoder);
};
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
new file mode 100644
index 0000000..d56fc4d
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoderBase"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerDecoderBase.h"
+
+#include "NuPlayerRenderer.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+NuPlayer::DecoderBase::DecoderBase(const sp<AMessage> &notify)
+ : mNotify(notify),
+ mBufferGeneration(0),
+ mRequestInputBuffersPending(false) {
+ // Every decoder has its own looper because MediaCodec operations
+ // are blocking, but NuPlayer needs asynchronous operations.
+ mDecoderLooper = new ALooper;
+ mDecoderLooper->setName("NPDecoder");
+ mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+NuPlayer::DecoderBase::~DecoderBase() {
+ mDecoderLooper->unregisterHandler(id());
+ mDecoderLooper->stop();
+}
+
+static
+status_t PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response) {
+ status_t err = msg->postAndAwaitResponse(response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!(*response)->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
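+
+// Note: PostAndAwaitResponse() treats a reply without an "err" field as
+// success, which is what the kWhatGetInputBuffers handler below relies on
+// when it replies with a bare AMessage. A responder that wants to report a
+// failure through this path would look roughly like the sketch below
+// (doSomethingThatCanFail() is a placeholder, not part of this patch):
+//
+//     uint32_t replyID;
+//     CHECK(msg->senderAwaitsResponse(&replyID));
+//
+//     sp<AMessage> response = new AMessage;
+//     status_t err = doSomethingThatCanFail();  // hypothetical work
+//     if (err != OK) {
+//         response->setInt32("err", err);       // surfaced by PostAndAwaitResponse()
+//     }
+//     response->postReply(replyID);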
+
+void NuPlayer::DecoderBase::configure(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+ msg->setMessage("format", format);
+ msg->post();
+}
+
+void NuPlayer::DecoderBase::init() {
+ mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer::DecoderBase::setRenderer(const sp<Renderer> &renderer) {
+ sp<AMessage> msg = new AMessage(kWhatSetRenderer, id());
+ msg->setObject("renderer", renderer);
+ msg->post();
+}
+
+status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+ sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, id());
+ msg->setPointer("buffers", buffers);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+void NuPlayer::DecoderBase::signalFlush() {
+ (new AMessage(kWhatFlush, id()))->post();
+}
+
+void NuPlayer::DecoderBase::signalResume(bool notifyComplete) {
+ sp<AMessage> msg = new AMessage(kWhatResume, id());
+ msg->setInt32("notifyComplete", notifyComplete);
+ msg->post();
+}
+
+void NuPlayer::DecoderBase::initiateShutdown() {
+ (new AMessage(kWhatShutdown, id()))->post();
+}
+
+void NuPlayer::DecoderBase::onRequestInputBuffers() {
+ if (mRequestInputBuffersPending) {
+ return;
+ }
+
+ doRequestBuffers();
+}
+
+void NuPlayer::DecoderBase::scheduleRequestBuffers() {
+ if (mRequestInputBuffersPending) {
+ return;
+ }
+ mRequestInputBuffersPending = true;
+ sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, id());
+ msg->post(10 * 1000ll);
+}
+
+void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {
+
+ switch (msg->what()) {
+ case kWhatConfigure:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+ onConfigure(format);
+ break;
+ }
+
+ case kWhatSetRenderer:
+ {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("renderer", &obj));
+ onSetRenderer(static_cast<Renderer *>(obj.get()));
+ break;
+ }
+
+ case kWhatGetInputBuffers:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ Vector<sp<ABuffer> > *dstBuffers;
+ CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
+
+ onGetInputBuffers(dstBuffers);
+
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
+ case kWhatRequestInputBuffers:
+ {
+ mRequestInputBuffersPending = false;
+ onRequestInputBuffers();
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ onFlush(true);
+ break;
+ }
+
+ case kWhatResume:
+ {
+ int32_t notifyComplete;
+ CHECK(msg->findInt32("notifyComplete", &notifyComplete));
+
+ onResume(notifyComplete);
+ break;
+ }
+
+ case kWhatShutdown:
+ {
+ onShutdown(true);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+void NuPlayer::DecoderBase::handleError(int32_t err)
+{
+ // We cannot immediately release the codec due to buffers still outstanding
+ // in the renderer. We signal the error to the player so it can shut down/release the
+ // decoder after flushing, and increment the generation to discard unnecessary messages.

+
+ ++mBufferGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+} // namespace android
+
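
Apart from getInputBuffers(), everything on DecoderBase is fire-and-forget: each public call just posts a message to the decoder's private looper. From the player's side the lifecycle looks roughly like the sketch below (the notify 'what' value and caller member names are assumptions, not part of this file):

// Hypothetical caller-side sequence; each call returns immediately and the
// work happens on the decoder's own looper.
sp<AMessage> notify = new AMessage(kWhatAudioNotify, id());
sp<DecoderBase> decoder = new Decoder(notify, mSource, mRenderer);
decoder->init();                 // registers the handler on its private looper
decoder->configure(format);      // handled asynchronously as kWhatConfigure
decoder->setRenderer(mRenderer); // handled asynchronously as kWhatSetRenderer
// ... later, when tearing down:
decoder->initiateShutdown();     // completion arrives via kWhatShutdownCompleted
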
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
new file mode 100644
index 0000000..6732ff4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_BASE_H_
+
+#define NUPLAYER_DECODER_BASE_H_
+
+#include "NuPlayer.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct MediaCodec;
+struct MediaBuffer;
+
+struct NuPlayer::DecoderBase : public AHandler {
+ DecoderBase(const sp<AMessage> &notify);
+
+ void configure(const sp<AMessage> &format);
+ void init();
+
+ void setRenderer(const sp<Renderer> &renderer);
+
+ status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
+ void signalFlush();
+ void signalResume(bool notifyComplete);
+ void initiateShutdown();
+
+ virtual void getStats(
+ int64_t *mNumFramesTotal,
+ int64_t *mNumFramesDropped) const = 0;
+
+ enum {
+ kWhatInputDiscontinuity = 'inDi',
+ kWhatVideoSizeChanged = 'viSC',
+ kWhatFlushCompleted = 'flsC',
+ kWhatShutdownCompleted = 'shDC',
+ kWhatResumeCompleted = 'resC',
+ kWhatEOS = 'eos ',
+ kWhatError = 'err ',
+ };
+
+protected:
+
+ virtual ~DecoderBase();
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+ virtual void onConfigure(const sp<AMessage> &format) = 0;
+ virtual void onSetRenderer(const sp<Renderer> &renderer) = 0;
+ virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers) = 0;
+ virtual void onResume(bool notifyComplete) = 0;
+ virtual void onFlush(bool notifyComplete) = 0;
+ virtual void onShutdown(bool notifyComplete) = 0;
+
+ void onRequestInputBuffers();
+ void scheduleRequestBuffers();
+ virtual void doRequestBuffers() = 0;
+ virtual void handleError(int32_t err);
+
+ sp<AMessage> mNotify;
+ int32_t mBufferGeneration;
+
+private:
+ enum {
+ kWhatConfigure = 'conf',
+ kWhatSetRenderer = 'setR',
+ kWhatGetInputBuffers = 'gInB',
+ kWhatRequestInputBuffers = 'reqB',
+ kWhatFlush = 'flus',
+ kWhatShutdown = 'shuD',
+ };
+
+ sp<ALooper> mDecoderLooper;
+ bool mRequestInputBuffersPending;
+
+ DISALLOW_EVIL_CONSTRUCTORS(DecoderBase);
+};
+
+} // namespace android
+
+#endif // NUPLAYER_DECODER_BASE_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
index f7aacdd..9f7f09a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -21,79 +21,80 @@
#include "NuPlayerDecoderPassThrough.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
+#include "ATSParser.h"
+
namespace android {
+// TODO optimize buffer size for power consumption
+// The offload read buffer size is 32 KB but 24 KB uses less power.
+static const size_t kAggregateBufferSizeBytes = 24 * 1024;
static const size_t kMaxCachedBytes = 200000;
-// The buffers will contain a bit less than kAggregateBufferSizeBytes.
-// So we can start off with just enough buffers to keep the cache full.
-static const size_t kMaxPendingBuffers = 1 + (kMaxCachedBytes / NuPlayer::kAggregateBufferSizeBytes);
NuPlayer::DecoderPassThrough::DecoderPassThrough(
- const sp<AMessage> &notify)
- : Decoder(notify),
- mNotify(notify),
- mBufferGeneration(0),
+ const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer)
+ : DecoderBase(notify),
+ mSource(source),
+ mRenderer(renderer),
+ mSkipRenderingUntilMediaTimeUs(-1ll),
+ mPaused(false),
mReachedEOS(true),
- mPendingBuffersToFill(0),
+ mPendingAudioErr(OK),
mPendingBuffersToDrain(0),
mCachedBytes(0),
mComponentName("pass through decoder") {
- mDecoderLooper = new ALooper;
- mDecoderLooper->setName("NuPlayerDecoderPassThrough");
- mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ ALOGW_IF(renderer == NULL, "expect a non-NULL renderer");
}
NuPlayer::DecoderPassThrough::~DecoderPassThrough() {
}
-void NuPlayer::DecoderPassThrough::configure(const sp<AMessage> &format) {
- sp<AMessage> msg = new AMessage(kWhatConfigure, id());
- msg->setMessage("format", format);
- msg->post();
-}
-
-void NuPlayer::DecoderPassThrough::init() {
- mDecoderLooper->registerHandler(this);
-}
-
-void NuPlayer::DecoderPassThrough::signalFlush() {
- (new AMessage(kWhatFlush, id()))->post();
-}
-
-void NuPlayer::DecoderPassThrough::signalResume() {
- (new AMessage(kWhatResume, id()))->post();
-}
-
-void NuPlayer::DecoderPassThrough::initiateShutdown() {
- (new AMessage(kWhatShutdown, id()))->post();
-}
-
-bool NuPlayer::DecoderPassThrough::supportsSeamlessFormatChange(
- const sp<AMessage> & /* targetFormat */) const {
- return true;
+void NuPlayer::DecoderPassThrough::getStats(
+ int64_t *numFramesTotal, int64_t *numFramesDropped) const {
+ *numFramesTotal = 0;
+ *numFramesDropped = 0;
}
void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
ALOGV("[%s] onConfigure", mComponentName.c_str());
mCachedBytes = 0;
- mPendingBuffersToFill = 0;
mPendingBuffersToDrain = 0;
mReachedEOS = false;
++mBufferGeneration;
- requestMaxBuffers();
+ onRequestInputBuffers();
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
- notify->setMessage("format", format);
- notify->post();
+ // The audio sink is already opened before the PassThrough decoder is created.
+ // Opening again might be relevant if the decoder is instantiated after shutdown
+ // and the format is different.
+ status_t err = mRenderer->openAudioSink(
+ format, true /* offloadOnly */, false /* hasVideo */,
+ AUDIO_OUTPUT_FLAG_NONE /* flags */, NULL /* isOffloaded */);
+ if (err != OK) {
+ handleError(err);
+ }
+}
+
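
openAudioSink() now returns a status and reports through an out-parameter whether offload actually engaged; the pass-through decoder passes NULL for that parameter because it only ever runs offloaded. A caller that can fall back would use it roughly like this (a sketch; the fallback path is assumed, not shown in this patch):

bool offloaded = false;
status_t err = mRenderer->openAudioSink(
        format, false /* offloadOnly */, hasVideo,
        AUDIO_OUTPUT_FLAG_NONE /* flags */, &offloaded);
if (err != OK) {
    // surface the error to the player
} else if (!offloaded) {
    // fall back to a regular MediaCodec-based audio decoder
}
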
+void NuPlayer::DecoderPassThrough::onSetRenderer(
+ const sp<Renderer> &renderer) {
+ // renderer can't be changed during offloading
+ ALOGW_IF(renderer != mRenderer,
+ "ignoring request to change renderer");
+}
+
+void NuPlayer::DecoderPassThrough::onGetInputBuffers(
+ Vector<sp<ABuffer> > * /* dstBuffers */) {
+ ALOGE("onGetInputBuffers() called unexpectedly");
}
bool NuPlayer::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
@@ -102,100 +103,301 @@ bool NuPlayer::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
return generation != mBufferGeneration;
}
-bool NuPlayer::DecoderPassThrough::requestABuffer() {
- if (mCachedBytes >= kMaxCachedBytes) {
- ALOGV("[%s] mCachedBytes = %zu",
- mComponentName.c_str(), mCachedBytes);
- return false;
+bool NuPlayer::DecoderPassThrough::isDoneFetching() const {
+ ALOGV("[%s] mCachedBytes = %zu, mReachedEOS = %d mPaused = %d",
+ mComponentName.c_str(), mCachedBytes, mReachedEOS, mPaused);
+
+ return mCachedBytes >= kMaxCachedBytes || mReachedEOS || mPaused;
+}
+
+void NuPlayer::DecoderPassThrough::doRequestBuffers() {
+ status_t err = OK;
+ while (!isDoneFetching()) {
+ sp<AMessage> msg = new AMessage();
+
+ err = fetchInputData(msg);
+ if (err != OK) {
+ break;
+ }
+
+ onInputBufferFetched(msg);
}
- if (mReachedEOS) {
- ALOGV("[%s] reached EOS", mComponentName.c_str());
- return false;
+
+ if (err == -EWOULDBLOCK
+ && mSource->feedMoreTSData() == OK) {
+ scheduleRequestBuffers();
}
+}
- sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
- reply->setInt32("generation", mBufferGeneration);
+status_t NuPlayer::DecoderPassThrough::dequeueAccessUnit(sp<ABuffer> *accessUnit) {
+ status_t err;
+
+ // Did we save an accessUnit earlier because of a discontinuity?
+ if (mPendingAudioAccessUnit != NULL) {
+ *accessUnit = mPendingAudioAccessUnit;
+ mPendingAudioAccessUnit.clear();
+ err = mPendingAudioErr;
+ ALOGV("feedDecoderInputData() use mPendingAudioAccessUnit");
+ } else {
+ err = mSource->dequeueAccessUnit(true /* audio */, accessUnit);
+ }
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatFillThisBuffer);
- notify->setMessage("reply", reply);
- notify->post();
- mPendingBuffersToFill++;
- ALOGV("requestABuffer: #ToFill = %zu, #ToDrain = %zu", mPendingBuffersToFill,
- mPendingBuffersToDrain);
+ if (err == INFO_DISCONTINUITY || err == ERROR_END_OF_STREAM) {
+ if (mAggregateBuffer != NULL) {
+ // We already have some data so save this for later.
+ mPendingAudioErr = err;
+ mPendingAudioAccessUnit = *accessUnit;
+ (*accessUnit).clear();
+ ALOGD("return aggregated buffer and save err(=%d) for later", err);
+ err = OK;
+ }
+ }
- return true;
+ return err;
}
-void android::NuPlayer::DecoderPassThrough::onInputBufferFilled(
+sp<ABuffer> NuPlayer::DecoderPassThrough::aggregateBuffer(
+ const sp<ABuffer> &accessUnit) {
+ sp<ABuffer> aggregate;
+
+ if (accessUnit == NULL) {
+ // accessUnit is saved to mPendingAudioAccessUnit
+ // return current mAggregateBuffer
+ aggregate = mAggregateBuffer;
+ mAggregateBuffer.clear();
+ return aggregate;
+ }
+
+ size_t smallSize = accessUnit->size();
+ if ((mAggregateBuffer == NULL)
+ // Don't bother aggregating unless several small buffers would fit.
+ && (smallSize < (kAggregateBufferSizeBytes / 3))) {
+ // Create a larger buffer for combining smaller buffers from the extractor.
+ mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
+ mAggregateBuffer->setRange(0, 0); // start empty
+ }
+
+ if (mAggregateBuffer != NULL) {
+ int64_t timeUs;
+ int64_t dummy;
+ bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
+ bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
+ // Will the smaller buffer fit?
+ size_t bigSize = mAggregateBuffer->size();
+ size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
+ // Should we save this small buffer for the next big buffer?
+ // If the first small buffer did not have a timestamp then save
+ // any buffer that does have a timestamp until the next big buffer.
+ if ((smallSize > roomLeft)
+ || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
+ mPendingAudioErr = OK;
+ mPendingAudioAccessUnit = accessUnit;
+ aggregate = mAggregateBuffer;
+ mAggregateBuffer.clear();
+ } else {
+ // Grab time from first small buffer if available.
+ if ((bigSize == 0) && smallTimestampValid) {
+ mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
+ }
+ // Append small buffer to the bigger buffer.
+ memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
+ bigSize += smallSize;
+ mAggregateBuffer->setRange(0, bigSize);
+
+ ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
+ smallSize, bigSize, mAggregateBuffer->capacity());
+ }
+ } else {
+ // decided not to aggregate
+ aggregate = accessUnit;
+ }
+
+ return aggregate;
+}
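
The aggregation above trades a little latency for fewer, larger writes to the offloaded sink. Its two size checks reduce to the small helpers below (constant and names are illustrative, mirroring kAggregateBufferSizeBytes above):

#include <cstddef>

static const size_t kAggregateSize = 24 * 1024;

// Start aggregating only when several small units would fit in one aggregate.
bool shouldStartAggregating(size_t smallSize) {
    return smallSize < kAggregateSize / 3;
}

// Hand off the aggregate (and park the new unit) once the next unit no longer fits.
bool shouldFlushAggregate(size_t smallSize, size_t aggregatedSoFar) {
    size_t roomLeft = kAggregateSize - aggregatedSoFar;
    return smallSize > roomLeft;
}

For example, a 5 KB access unit starts aggregation (5 KB < 24 KB / 3 = 8 KB); once roughly 20 KB has been queued, the next 5 KB unit exceeds the ~4 KB of remaining room, so the aggregate is queued to the renderer and the small unit waits in mPendingAudioAccessUnit.
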
+
+status_t NuPlayer::DecoderPassThrough::fetchInputData(sp<AMessage> &reply) {
+ sp<ABuffer> accessUnit;
+
+ do {
+ status_t err = dequeueAccessUnit(&accessUnit);
+
+ if (err == -EWOULDBLOCK) {
+ return err;
+ } else if (err != OK) {
+ if (err == INFO_DISCONTINUITY) {
+ int32_t type;
+ CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+ bool formatChange =
+ (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT) != 0;
+
+ bool timeChange =
+ (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+ ALOGI("audio discontinuity (formatChange=%d, time=%d)",
+ formatChange, timeChange);
+
+ if (formatChange || timeChange) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatInputDiscontinuity);
+ // will perform seamless format change,
+ // only notify NuPlayer to scan sources
+ msg->setInt32("formatChange", false);
+ msg->post();
+ }
+
+ if (timeChange) {
+ onFlush(false /* notifyComplete */);
+ err = OK;
+ } else if (formatChange) {
+ // do seamless format change
+ err = OK;
+ } else {
+ // This stream is unaffected by the discontinuity
+ return -EWOULDBLOCK;
+ }
+ }
+
+ reply->setInt32("err", err);
+ return OK;
+ }
+
+ accessUnit = aggregateBuffer(accessUnit);
+ } while (accessUnit == NULL);
+
+#if 0
+ int64_t mediaTimeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("feeding audio input buffer at media time %.2f secs",
+ mediaTimeUs / 1E6);
+#endif
+
+ reply->setBuffer("buffer", accessUnit);
+
+ return OK;
+}
+
+void NuPlayer::DecoderPassThrough::onInputBufferFetched(
const sp<AMessage> &msg) {
- --mPendingBuffersToFill;
if (mReachedEOS) {
return;
}
sp<ABuffer> buffer;
- msg->findBuffer("buffer", &buffer);
+ bool hasBuffer = msg->findBuffer("buffer", &buffer);
if (buffer == NULL) {
- mReachedEOS = true;
+ int32_t streamErr = ERROR_END_OF_STREAM;
+ CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+ if (streamErr == OK) {
+ return;
+ }
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatEOS);
- notify->setInt32("err", ERROR_END_OF_STREAM);
- notify->post();
+ mReachedEOS = true;
+ if (mRenderer != NULL) {
+ mRenderer->queueEOS(true /* audio */, ERROR_END_OF_STREAM);
+ }
return;
}
- mCachedBytes += buffer->size();
+ sp<AMessage> extra;
+ if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+ int64_t resumeAtMediaTimeUs;
+ if (extra->findInt64(
+ "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
+ ALOGI("[%s] suppressing rendering until %lld us",
+ mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+ mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+ }
+ }
+
+ int32_t bufferSize = buffer->size();
+ mCachedBytes += bufferSize;
+
+ if (mSkipRenderingUntilMediaTimeUs >= 0) {
+ int64_t timeUs = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+ ALOGV("[%s] dropping buffer at time %lld as requested.",
+ mComponentName.c_str(), (long long)timeUs);
+
+ onBufferConsumed(bufferSize);
+ return;
+ }
+
+ mSkipRenderingUntilMediaTimeUs = -1;
+ }
+
+ if (mRenderer == NULL) {
+ onBufferConsumed(bufferSize);
+ return;
+ }
sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id());
reply->setInt32("generation", mBufferGeneration);
- reply->setInt32("size", buffer->size());
+ reply->setInt32("size", bufferSize);
+
+ mRenderer->queueBuffer(true /* audio */, buffer, reply);
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatDrainThisBuffer);
- notify->setBuffer("buffer", buffer);
- notify->setMessage("reply", reply);
- notify->post();
++mPendingBuffersToDrain;
- ALOGV("onInputBufferFilled: #ToFill = %zu, #ToDrain = %zu, cachedBytes = %zu",
- mPendingBuffersToFill, mPendingBuffersToDrain, mCachedBytes);
+ ALOGV("onInputBufferFetched: #ToDrain = %zu, cachedBytes = %zu",
+ mPendingBuffersToDrain, mCachedBytes);
}
void NuPlayer::DecoderPassThrough::onBufferConsumed(int32_t size) {
--mPendingBuffersToDrain;
mCachedBytes -= size;
- ALOGV("onBufferConsumed: #ToFill = %zu, #ToDrain = %zu, cachedBytes = %zu",
- mPendingBuffersToFill, mPendingBuffersToDrain, mCachedBytes);
- requestABuffer();
+ ALOGV("onBufferConsumed: #ToDrain = %zu, cachedBytes = %zu",
+ mPendingBuffersToDrain, mCachedBytes);
+ onRequestInputBuffers();
+}
+
+void NuPlayer::DecoderPassThrough::onResume(bool notifyComplete) {
+ mPaused = false;
+
+ onRequestInputBuffers();
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatResumeCompleted);
+ notify->post();
+ }
}
-void NuPlayer::DecoderPassThrough::onFlush() {
+void NuPlayer::DecoderPassThrough::onFlush(bool notifyComplete) {
++mBufferGeneration;
+ mSkipRenderingUntilMediaTimeUs = -1;
+ mPendingAudioAccessUnit.clear();
+ mPendingAudioErr = OK;
+ mAggregateBuffer.clear();
+
+ if (mRenderer != NULL) {
+ mRenderer->flush(true /* audio */, notifyComplete);
+ mRenderer->signalTimeDiscontinuity();
+ }
+
+ if (notifyComplete) {
+ mPaused = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ }
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatFlushCompleted);
- notify->post();
- mPendingBuffersToFill = 0;
mPendingBuffersToDrain = 0;
mCachedBytes = 0;
mReachedEOS = false;
}
-void NuPlayer::DecoderPassThrough::requestMaxBuffers() {
- for (size_t i = 0; i < kMaxPendingBuffers; i++) {
- if (!requestABuffer()) {
- break;
- }
- }
-}
-
-void NuPlayer::DecoderPassThrough::onShutdown() {
+void NuPlayer::DecoderPassThrough::onShutdown(bool notifyComplete) {
++mBufferGeneration;
+ mSkipRenderingUntilMediaTimeUs = -1;
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ }
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatShutdownCompleted);
- notify->post();
mReachedEOS = true;
}
@@ -204,31 +406,6 @@ void NuPlayer::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
msg->debugString().c_str());
switch (msg->what()) {
- case kWhatConfigure:
- {
- sp<AMessage> format;
- CHECK(msg->findMessage("format", &format));
- onConfigure(format);
- break;
- }
-
- case kWhatRequestABuffer:
- {
- if (!isStaleReply(msg)) {
- requestABuffer();
- }
-
- break;
- }
-
- case kWhatInputBufferFilled:
- {
- if (!isStaleReply(msg)) {
- onInputBufferFilled(msg);
- }
- break;
- }
-
case kWhatBufferConsumed:
{
if (!isStaleReply(msg)) {
@@ -239,26 +416,8 @@ void NuPlayer::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatFlush:
- {
- onFlush();
- break;
- }
-
- case kWhatResume:
- {
- requestMaxBuffers();
- break;
- }
-
- case kWhatShutdown:
- {
- onShutdown();
- break;
- }
-
default:
- TRESPASS();
+ DecoderBase::onMessageReceived(msg);
break;
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
index fb20257..a6e1faf 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -20,21 +20,18 @@
#include "NuPlayer.h"
-#include "NuPlayerDecoder.h"
+#include "NuPlayerDecoderBase.h"
namespace android {
-struct NuPlayer::DecoderPassThrough : public Decoder {
- DecoderPassThrough(const sp<AMessage> &notify);
+struct NuPlayer::DecoderPassThrough : public DecoderBase {
+ DecoderPassThrough(const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer);
- virtual void configure(const sp<AMessage> &format);
- virtual void init();
-
- virtual void signalFlush();
- virtual void signalResume();
- virtual void initiateShutdown();
-
- bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+ virtual void getStats(
+ int64_t *mNumFramesTotal,
+ int64_t *mNumFramesDropped) const;
protected:
@@ -42,41 +39,48 @@ protected:
virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual void onConfigure(const sp<AMessage> &format);
+ virtual void onSetRenderer(const sp<Renderer> &renderer);
+ virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);
+ virtual void onResume(bool notifyComplete);
+ virtual void onFlush(bool notifyComplete);
+ virtual void onShutdown(bool notifyComplete);
+ virtual void doRequestBuffers();
+
private:
enum {
- kWhatRequestABuffer = 'reqB',
- kWhatConfigure = 'conf',
- kWhatInputBufferFilled = 'inpF',
kWhatBufferConsumed = 'bufC',
- kWhatFlush = 'flus',
- kWhatShutdown = 'shuD',
};
- sp<AMessage> mNotify;
- sp<ALooper> mDecoderLooper;
+ sp<Source> mSource;
+ sp<Renderer> mRenderer;
+ int64_t mSkipRenderingUntilMediaTimeUs;
+ bool mPaused;
- /** Returns true if a buffer was requested.
- * Returns false if at EOS or cache already full.
- */
- bool requestABuffer();
- bool isStaleReply(const sp<AMessage> &msg);
+ bool mReachedEOS;
- void onConfigure(const sp<AMessage> &format);
- void onFlush();
- void onInputBufferFilled(const sp<AMessage> &msg);
- void onBufferConsumed(int32_t size);
- void requestMaxBuffers();
- void onShutdown();
+ // Used by fetchInputData()/aggregateBuffer() to aggregate small buffers
+ // into one large buffer.
+ sp<ABuffer> mPendingAudioAccessUnit;
+ status_t mPendingAudioErr;
+ sp<ABuffer> mAggregateBuffer;
- int32_t mBufferGeneration;
- bool mReachedEOS;
- // TODO mPendingBuffersToFill and mPendingBuffersToDrain are only for
- // debugging. They can be removed when the power investigation is done.
- size_t mPendingBuffersToFill;
+ // mPendingBuffersToDrain is only for debugging. It can be removed
+ // when the power investigation is done.
size_t mPendingBuffersToDrain;
size_t mCachedBytes;
AString mComponentName;
+ bool isStaleReply(const sp<AMessage> &msg);
+ bool isDoneFetching() const;
+
+ status_t dequeueAccessUnit(sp<ABuffer> *accessUnit);
+ sp<ABuffer> aggregateBuffer(const sp<ABuffer> &accessUnit);
+ status_t fetchInputData(sp<AMessage> &reply);
+
+ void onInputBufferFetched(const sp<AMessage> &msg);
+ void onBufferConsumed(int32_t size);
+
DISALLOW_EVIL_CONSTRUCTORS(DecoderPassThrough);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index ab46074..d050c78 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -239,16 +239,24 @@ status_t NuPlayerDriver::start() {
// fall through
}
+ case STATE_PAUSED:
+ case STATE_STOPPED_AND_PREPARED:
+ {
+ if (mAtEOS && mStartupSeekTimeUs < 0) {
+ mStartupSeekTimeUs = 0;
+ mPositionUs = -1;
+ }
+
+ // fall through
+ }
+
case STATE_PREPARED:
{
mAtEOS = false;
mPlayer->start();
if (mStartupSeekTimeUs >= 0) {
- if (mStartupSeekTimeUs > 0) {
- mPlayer->seekToAsync(mStartupSeekTimeUs);
- }
-
+ mPlayer->seekToAsync(mStartupSeekTimeUs);
mStartupSeekTimeUs = -1;
}
break;
@@ -264,20 +272,6 @@ status_t NuPlayerDriver::start() {
break;
}
- case STATE_PAUSED:
- case STATE_STOPPED_AND_PREPARED:
- {
- if (mAtEOS) {
- mPlayer->seekToAsync(0);
- mAtEOS = false;
- mPlayer->resume();
- mPositionUs = -1;
- } else {
- mPlayer->resume();
- }
- break;
- }
-
default:
return INVALID_OPERATION;
}
@@ -316,6 +310,13 @@ status_t NuPlayerDriver::stop() {
}
status_t NuPlayerDriver::pause() {
+ // The NuPlayerRenderer may get flushed if paused for long enough, e.g. by the pause
+ // timeout tear-down for audio offload mode. If that happens, the NuPlayerRenderer will no
+ // longer know the current position. So, similar to seekTo, update |mPositionUs| to the
+ // pause position by calling getCurrentPosition here.
+ int msec;
+ getCurrentPosition(&msec);
+
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -348,6 +349,7 @@ status_t NuPlayerDriver::seekTo(int msec) {
switch (mState) {
case STATE_PREPARED:
+ case STATE_STOPPED_AND_PREPARED:
{
mStartupSeekTimeUs = seekTimeUs;
// pretend that the seek completed. It will actually happen when starting playback.
@@ -485,13 +487,16 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
case INVOKE_ID_SELECT_TRACK:
{
int trackIndex = request.readInt32();
- return mPlayer->selectTrack(trackIndex, true /* select */);
+ int msec = 0;
+ // getCurrentPosition should always return OK
+ getCurrentPosition(&msec);
+ return mPlayer->selectTrack(trackIndex, true /* select */, msec * 1000ll);
}
case INVOKE_ID_UNSELECT_TRACK:
{
int trackIndex = request.readInt32();
- return mPlayer->selectTrack(trackIndex, false /* select */);
+ return mPlayer->selectTrack(trackIndex, false /* select */, 0xdeadbeef /* not used */);
}
case INVOKE_ID_GET_SELECTED_TRACK:
@@ -630,9 +635,24 @@ void NuPlayerDriver::notifyListener_l(
case MEDIA_PLAYBACK_COMPLETE:
{
if (mState != STATE_RESET_IN_PROGRESS) {
+ if (mAutoLoop) {
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+ if (streamType == AUDIO_STREAM_NOTIFICATION) {
+ ALOGW("disabling auto-loop for notification");
+ mAutoLoop = false;
+ }
+ }
if (mLooping || (mAutoLoop
&& (mAudioSink == NULL || mAudioSink->realtime()))) {
mPlayer->seekToAsync(0);
+ if (mAudioSink != NULL) {
+ // The renderer has stopped the sink at the end in order to play out
+ // the last little bit of audio. If we're looping, we need to restart it.
+ mAudioSink->start();
+ }
break;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 638d9bc..21b74ee 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,8 +20,6 @@
#include "NuPlayerRenderer.h"
-#include <cutils/properties.h>
-
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -38,21 +36,11 @@ namespace android {
// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
-static const int64_t kOffloadPauseMaxUs = 60000000ll;
+static const int64_t kOffloadPauseMaxUs = 10000000ll;
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
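
Lowering kOffloadPauseMaxUs from 60 s to 10 s lets the DSP power down sooner after a pause. Such a timeout is typically armed as a delayed message, roughly as below; the message name and generation field here are assumptions, while the corresponding cancel path is visible in cancelAudioOffloadPauseTimeout() further down:

// Illustrative only: arm a pause timeout that is ignored if playback resumes
// (the generation is bumped on resume, so a stale timeout does nothing).
sp<AMessage> msg = new AMessage(kWhatOffloadPauseTimeout, id());
msg->setInt32("generation", mPauseTimeoutGeneration);
msg->post(kOffloadPauseMaxUs);  // ~10 seconds with the new constant
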
-static bool sFrameAccurateAVsync = false;
-
-static void readProperties() {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("persist.sys.media.avsync", value, NULL)) {
- sFrameAccurateAVsync =
- !strcmp("1", value) || !strcasecmp("true", value);
- }
-}
-
NuPlayer::Renderer::Renderer(
const sp<MediaPlayerBase::AudioSink> &sink,
const sp<AMessage> &notify,
@@ -68,12 +56,16 @@ NuPlayer::Renderer::Renderer(
mAudioFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
mAnchorTimeRealUs(-1),
+ mAnchorNumFramesWritten(-1),
+ mAnchorMaxMediaUs(-1),
mVideoLateByUs(0ll),
mHasAudio(false),
mHasVideo(false),
mPauseStartedTimeRealUs(-1),
mFlushingAudio(false),
mFlushingVideo(false),
+ mNotifyCompleteAudio(false),
+ mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
mVideoSampleReceived(false),
@@ -85,7 +77,6 @@ NuPlayer::Renderer::Renderer(
mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
mTotalBuffersQueued(0),
mLastAudioBufferDrained(0) {
- readProperties();
}
NuPlayer::Renderer::~Renderer() {
@@ -116,15 +107,17 @@ void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
msg->post();
}
-void NuPlayer::Renderer::flush(bool audio) {
+void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
{
Mutex::Autolock autoLock(mFlushLock);
if (audio) {
+ mNotifyCompleteAudio |= notifyComplete;
if (mFlushingAudio) {
return;
}
mFlushingAudio = true;
} else {
+ mNotifyCompleteVideo |= notifyComplete;
if (mFlushingVideo) {
return;
}
@@ -155,6 +148,10 @@ void NuPlayer::Renderer::signalDisableOffloadAudio() {
(new AMessage(kWhatDisableOffloadAudio, id()))->post();
}
+void NuPlayer::Renderer::signalEnableOffloadAudio() {
+ (new AMessage(kWhatEnableOffloadAudio, id()))->post();
+}
+
void NuPlayer::Renderer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
@@ -173,7 +170,8 @@ status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
return getCurrentPosition(mediaUs, ALooper::GetNowUs());
}
-status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
+status_t NuPlayer::Renderer::getCurrentPosition(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
Mutex::Autolock autoLock(mTimeLock);
if (!mHasAudio && !mHasVideo) {
return NO_INIT;
@@ -182,12 +180,21 @@ status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs)
if (mAnchorTimeMediaUs < 0) {
return NO_INIT;
}
+
int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
if (mPauseStartedTimeRealUs != -1) {
positionUs -= (nowUs - mPauseStartedTimeRealUs);
}
+ // limit the position to the last queued media time (for a video-only stream the
+ // position will be discrete, as we don't know how long each frame lasts)
+ if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
+ if (positionUs > mAnchorMaxMediaUs) {
+ positionUs = mAnchorMaxMediaUs;
+ }
+ }
+
if (positionUs < mAudioFirstAnchorTimeMediaUs) {
positionUs = mAudioFirstAnchorTimeMediaUs;
}
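
The clamping added here keeps the reported position monotone with wall-clock time but bounded by what has actually been queued. The same arithmetic as a self-contained sketch (the -1 sentinels mirror the member fields above; the allowPastQueuedVideo escape hatch is omitted, so this is an illustration, not the patched function):

#include <cstdint>

int64_t estimatePositionUs(
        int64_t nowUs, int64_t anchorMediaUs, int64_t anchorRealUs,
        int64_t pauseStartedRealUs,   // -1 when not paused
        int64_t anchorMaxMediaUs,     // -1 when unknown (e.g. offloaded audio)
        int64_t firstAnchorMediaUs) { // -1 when there is no audio anchor yet
    int64_t positionUs = (nowUs - anchorRealUs) + anchorMediaUs;
    if (pauseStartedRealUs != -1) {
        positionUs -= (nowUs - pauseStartedRealUs);   // freeze while paused
    }
    if (anchorMaxMediaUs >= 0 && positionUs > anchorMaxMediaUs) {
        positionUs = anchorMaxMediaUs;                // don't run past queued data
    }
    if (positionUs < firstAnchorMediaUs) {
        positionUs = firstAnchorMediaUs;
    }
    return positionUs;
}
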
@@ -217,10 +224,12 @@ void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
}
}
-void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
+void NuPlayer::Renderer::setAnchorTime(
+ int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
Mutex::Autolock autoLock(mTimeLock);
mAnchorTimeMediaUs = mediaUs;
mAnchorTimeRealUs = realUs;
+ mAnchorNumFramesWritten = numFramesWritten;
if (resume) {
mPauseStartedTimeRealUs = -1;
}
@@ -241,11 +250,12 @@ void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
mPauseStartedTimeRealUs = realUs;
}
-bool NuPlayer::Renderer::openAudioSink(
+status_t NuPlayer::Renderer::openAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
bool hasVideo,
- uint32_t flags) {
+ uint32_t flags,
+ bool *isOffloaded) {
sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
msg->setMessage("format", format);
msg->setInt32("offload-only", offloadOnly);
@@ -255,9 +265,15 @@ bool NuPlayer::Renderer::openAudioSink(
sp<AMessage> response;
msg->postAndAwaitResponse(&response);
- int32_t offload;
- CHECK(response->findInt32("offload", &offload));
- return (offload != 0);
+ int32_t err;
+ if (!response->findInt32("err", &err)) {
+ err = INVALID_OPERATION;
+ } else if (err == OK && isOffloaded != NULL) {
+ int32_t offload;
+ CHECK(response->findInt32("offload", &offload));
+ *isOffloaded = (offload != 0);
+ }
+ return err;
}
void NuPlayer::Renderer::closeAudioSink() {
@@ -283,10 +299,11 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t flags;
CHECK(msg->findInt32("flags", (int32_t *)&flags));
- bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);
+ status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);
sp<AMessage> response = new AMessage;
- response->setInt32("offload", offload);
+ response->setInt32("err", err);
+ response->setInt32("offload", offloadingAudio());
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
@@ -406,6 +423,12 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatEnableOffloadAudio:
+ {
+ onEnableOffloadAudio();
+ break;
+ }
+
case kWhatPause:
{
onPause();
@@ -541,7 +564,7 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
- onNewAudioMediaTime(mediaTimeUs);
+ setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -564,6 +587,14 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
notifyIfMediaRenderingStarted();
}
+ if (mAudioFirstAnchorTimeMediaUs >= 0) {
+ int64_t nowUs = ALooper::GetNowUs();
+ setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
+ }
+
+ // we don't know how much data we are queueing for offloaded tracks
+ mAnchorMaxMediaUs = -1;
+
if (hasEOS) {
(new AMessage(kWhatStopAudioSink, id()))->post();
}
@@ -607,6 +638,10 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
+ // Need to stop the track here, because that will play out the last
+ // little bit at the end of the file. Otherwise short files won't play.
+ mAudioSink->stop();
+ mNumFramesWritten = 0;
return false;
}
@@ -624,8 +659,9 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
if (written < 0) {
- // An error in AudioSink write is fatal here.
- LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
+ ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ break;
}
entry->mOffset += written;
@@ -663,6 +699,11 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
break;
}
}
+ mAnchorMaxMediaUs =
+ mAnchorTimeMediaUs +
+ (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+ * 1000LL * mAudioSink->msecsPerFrame());
+
return !mAudioQueue.empty();
}
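
The cap computed above converts the frames written since the current anchor back into media time. As a worked example with illustrative numbers: at 44.1 kHz, msecsPerFrame() is about 1000.0 / 44100 ~ 0.0227 ms, so 4410 frames written past the anchor push mAnchorMaxMediaUs roughly 100 ms (100000 us) beyond mAnchorTimeMediaUs. The same computation as a tiny helper:

#include <cstdint>

int64_t anchorMaxMediaUs(int64_t anchorMediaUs,
                         int64_t numFramesWritten,
                         int64_t anchorNumFramesWritten,
                         float msecsPerFrame) {
    long long framesSinceAnchor = (long long)numFramesWritten - anchorNumFramesWritten;
    if (framesSinceAnchor < 0) {
        framesSinceAnchor = 0;  // anchor not yet established for this sink
    }
    return anchorMediaUs + (int64_t)(framesSinceAnchor * 1000LL * msecsPerFrame);
}
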
@@ -674,7 +715,7 @@ int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
int64_t currentPositionUs;
- if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
+ if (getCurrentPosition(&currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
// If failed to get current position, e.g. due to audio clock is not ready, then just
// play out video immediately without delay.
return nowUs;
@@ -690,7 +731,8 @@ void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
}
setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
int64_t nowUs = ALooper::GetNowUs();
- setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
+ setAnchorTime(
+ mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
}
void NuPlayer::Renderer::postDrainVideoQueue() {
@@ -729,10 +771,14 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
if (mAnchorTimeMediaUs < 0) {
setAnchorTime(mediaTimeUs, nowUs);
+ mAnchorMaxMediaUs = mediaTimeUs;
realTimeUs = nowUs;
} else {
realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
}
+ if (!mHasAudio) {
+ mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
+ }
// Heuristics to handle situation when media time changed without a
// discontinuity. If we have not drained an audio buffer that was
@@ -761,11 +807,6 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
// post 2 display refreshes before rendering is due
- // FIXME currently this increases power consumption, so unless frame-accurate
- // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
- if (!sFrameAccurateAVsync) {
- twoVsyncsUs >>= 4;
- }
msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
mDrainVideoQueuePending = true;
@@ -988,15 +1029,19 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
}
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
- int32_t audio;
+ int32_t audio, notifyComplete;
CHECK(msg->findInt32("audio", &audio));
{
Mutex::Autolock autoLock(mFlushLock);
if (audio) {
mFlushingAudio = false;
+ notifyComplete = mNotifyCompleteAudio;
+ mNotifyCompleteAudio = false;
} else {
mFlushingVideo = false;
+ notifyComplete = mNotifyCompleteVideo;
+ mNotifyCompleteVideo = false;
}
}
@@ -1012,6 +1057,7 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
Mutex::Autolock autoLock(mLock);
syncQueuesDone_l();
setPauseStartedTimeRealUs(-1);
+ setAnchorTime(-1, -1);
}
ALOGV("flushing %s", audio ? "audio" : "video");
@@ -1049,7 +1095,10 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
}
mVideoSampleReceived = false;
- notifyFlushComplete(audio);
+
+ if (notifyComplete) {
+ notifyFlushComplete(audio);
+ }
}
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
@@ -1103,6 +1152,7 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
+ mAnchorNumFramesWritten = -1;
uint32_t written;
if (mAudioSink->getFramesWritten(&written) == OK) {
mNumFramesWritten = written;
@@ -1115,6 +1165,12 @@ void NuPlayer::Renderer::onDisableOffloadAudio() {
++mAudioQueueGeneration;
}
+void NuPlayer::Renderer::onEnableOffloadAudio() {
+ Mutex::Autolock autoLock(mLock);
+ mFlags |= FLAG_OFFLOAD_AUDIO;
+ ++mAudioQueueGeneration;
+}
+
void NuPlayer::Renderer::onPause() {
if (mPaused) {
ALOGW("Renderer::onPause() called while already paused!");
@@ -1142,8 +1198,6 @@ void NuPlayer::Renderer::onPause() {
}
void NuPlayer::Renderer::onResume() {
- readProperties();
-
if (!mPaused) {
return;
}
@@ -1158,7 +1212,8 @@ void NuPlayer::Renderer::onResume() {
if (mPauseStartedTimeRealUs != -1) {
int64_t newAnchorRealUs =
mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
- setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
+ setAnchorTime(
+ mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
}
if (!mAudioQueue.empty()) {
@@ -1276,7 +1331,7 @@ void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
}
}
-bool NuPlayer::Renderer::onOpenAudioSink(
+status_t NuPlayer::Renderer::onOpenAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
bool hasVideo,
@@ -1338,11 +1393,12 @@ bool NuPlayer::Renderer::onOpenAudioSink(
if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
ALOGV("openAudioSink: no change in offload mode");
// no change from previous configuration, everything ok.
- return offloadingAudio();
+ return OK;
}
ALOGV("openAudioSink: try to open AudioSink in offload mode");
- flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
- flags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ uint32_t offloadFlags = flags;
+ offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
audioSinkChanged = true;
mAudioSink->close();
err = mAudioSink->open(
@@ -1353,7 +1409,7 @@ bool NuPlayer::Renderer::onOpenAudioSink(
8 /* bufferCount */,
&NuPlayer::Renderer::AudioSinkCallback,
this,
- (audio_output_flags_t)flags,
+ (audio_output_flags_t)offloadFlags,
&offloadInfo);
if (err == OK) {
@@ -1377,13 +1433,13 @@ bool NuPlayer::Renderer::onOpenAudioSink(
}
}
if (!offloadOnly && !offloadingAudio()) {
- flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
ALOGV("openAudioSink: open AudioSink in NON-offload mode");
-
+ uint32_t pcmFlags = flags;
+ pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
audioSinkChanged = true;
mAudioSink->close();
mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
- CHECK_EQ(mAudioSink->open(
+ status_t err = mAudioSink->open(
sampleRate,
numChannels,
(audio_channel_mask_t)channelMask,
@@ -1391,15 +1447,20 @@ bool NuPlayer::Renderer::onOpenAudioSink(
8 /* bufferCount */,
NULL,
NULL,
- (audio_output_flags_t)flags),
- (status_t)OK);
+ (audio_output_flags_t)pcmFlags);
+ if (err != OK) {
+ ALOGW("openAudioSink: non offloaded open failed status: %d", err);
+ return err;
+ }
mAudioSink->start();
}
if (audioSinkChanged) {
onAudioSinkChanged();
}
-
- return offloadingAudio();
+ if (offloadingAudio()) {
+ mAudioOffloadTornDown = false;
+ }
+ return OK;
}
void NuPlayer::Renderer::onCloseAudioSink() {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index b15a266..406c64c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -46,13 +46,14 @@ struct NuPlayer::Renderer : public AHandler {
void queueEOS(bool audio, status_t finalResult);
- void flush(bool audio);
+ void flush(bool audio, bool notifyComplete);
void signalTimeDiscontinuity();
void signalAudioSinkChanged();
void signalDisableOffloadAudio();
+ void signalEnableOffloadAudio();
void pause();
void resume();
@@ -61,20 +62,23 @@ struct NuPlayer::Renderer : public AHandler {
// Following setters and getters are protected by mTimeLock.
status_t getCurrentPosition(int64_t *mediaUs);
- status_t getCurrentPosition(int64_t *mediaUs, int64_t nowUs);
+ status_t getCurrentPosition(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
void setHasMedia(bool audio);
void setAudioFirstAnchorTime(int64_t mediaUs);
void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
- void setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume = false);
+ void setAnchorTime(
+ int64_t mediaUs, int64_t realUs, int64_t numFramesWritten = -1, bool resume = false);
void setVideoLateByUs(int64_t lateUs);
int64_t getVideoLateByUs();
void setPauseStartedTimeRealUs(int64_t realUs);
- bool openAudioSink(
+ status_t openAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
bool hasVideo,
- uint32_t flags);
+ uint32_t flags,
+ bool *isOffloaded);
void closeAudioSink();
enum {
@@ -112,6 +116,7 @@ private:
kWhatCloseAudioSink = 'clsA',
kWhatStopAudioSink = 'stpA',
kWhatDisableOffloadAudio = 'noOA',
+ kWhatEnableOffloadAudio = 'enOA',
kWhatSetVideoFrameRate = 'sVFR',
};
@@ -148,6 +153,8 @@ private:
int64_t mAudioFirstAnchorTimeMediaUs;
int64_t mAnchorTimeMediaUs;
int64_t mAnchorTimeRealUs;
+ int64_t mAnchorNumFramesWritten;
+ int64_t mAnchorMaxMediaUs;
int64_t mVideoLateByUs;
bool mHasAudio;
bool mHasVideo;
@@ -156,6 +163,8 @@ private:
Mutex mFlushLock; // protects the following 2 member vars.
bool mFlushingAudio;
bool mFlushingVideo;
+ bool mNotifyCompleteAudio;
+ bool mNotifyCompleteVideo;
bool mSyncQueues;
@@ -174,6 +183,7 @@ private:
int32_t mTotalBuffersQueued;
int32_t mLastAudioBufferDrained;
+
size_t fillAudioBuffer(void *buffer, size_t size);
bool onDrainAudioQueue();
@@ -195,11 +205,12 @@ private:
void onFlush(const sp<AMessage> &msg);
void onAudioSinkChanged();
void onDisableOffloadAudio();
+ void onEnableOffloadAudio();
void onPause();
void onResume();
void onSetVideoFrameRate(float fps);
void onAudioOffloadTearDown(AudioOffloadTearDownReason reason);
- bool onOpenAudioSink(
+ status_t onOpenAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
bool hasVideo,
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 9924f18..e92cd36 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -96,7 +96,7 @@ struct NuPlayer::Source : public AHandler {
return INVALID_OPERATION;
}
- virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */) {
+ virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */, int64_t /* timeUs*/) {
return INVALID_OPERATION;
}
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index ffacb8f..0282a9f 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -50,7 +50,7 @@ NuPlayer::RTSPSource::RTSPSource(
mState(DISCONNECTED),
mFinalResult(OK),
mDisconnectReplyID(0),
- mBuffering(true),
+ mBuffering(false),
mSeekGeneration(0),
mEOSTimeoutAudio(0),
mEOSTimeoutVideo(0) {
@@ -106,9 +106,7 @@ void NuPlayer::RTSPSource::prepareAsync() {
mHandler->connect();
}
- sp<AMessage> notifyStart = dupNotify();
- notifyStart->setInt32("what", kWhatBufferingStart);
- notifyStart->post();
+ startBufferingIfNecessary();
}
void NuPlayer::RTSPSource::start() {
@@ -144,6 +142,7 @@ void NuPlayer::RTSPSource::resume() {
}
status_t NuPlayer::RTSPSource::feedMoreTSData() {
+ Mutex::Autolock _l(mBufferingLock);
return mFinalResult;
}
@@ -195,16 +194,8 @@ bool NuPlayer::RTSPSource::haveSufficientDataOnAllTracks() {
status_t NuPlayer::RTSPSource::dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) {
- if (mBuffering) {
- if (!haveSufficientDataOnAllTracks()) {
- return -EWOULDBLOCK;
- }
-
- mBuffering = false;
-
- sp<AMessage> notify = dupNotify();
- notify->setInt32("what", kWhatBufferingEnd);
- notify->post();
+ if (!stopBufferingIfNecessary()) {
+ return -EWOULDBLOCK;
}
sp<AnotherPacketSource> source = getSource(audio);
@@ -246,11 +237,7 @@ status_t NuPlayer::RTSPSource::dequeueAccessUnit(
if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
// We should not enter buffering mode
// if any of the sources already have detected EOS.
- mBuffering = true;
-
- sp<AMessage> notify = dupNotify();
- notify->setInt32("what", kWhatBufferingStart);
- notify->post();
+ startBufferingIfNecessary();
}
return -EWOULDBLOCK;
@@ -506,7 +493,7 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
sp<AnotherPacketSource> source = info->mSource;
if (source != NULL) {
source->queueDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK,
+ ATSParser::DISCONTINUITY_TIME,
NULL,
true /* discard */);
}
@@ -630,7 +617,7 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {
}
mState = DISCONNECTED;
- mFinalResult = err;
+ setError(err);
if (mDisconnectReplyID != 0) {
finishDisconnectIfPossible();
@@ -657,7 +644,7 @@ void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
}
mState = DISCONNECTED;
- mFinalResult = err;
+ setError(err);
if (mDisconnectReplyID != 0) {
finishDisconnectIfPossible();
@@ -678,4 +665,40 @@ void NuPlayer::RTSPSource::finishDisconnectIfPossible() {
mDisconnectReplyID = 0;
}
+void NuPlayer::RTSPSource::setError(status_t err) {
+ Mutex::Autolock _l(mBufferingLock);
+ mFinalResult = err;
+}
+
+void NuPlayer::RTSPSource::startBufferingIfNecessary() {
+ Mutex::Autolock _l(mBufferingLock);
+
+ if (!mBuffering) {
+ mBuffering = true;
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatBufferingStart);
+ notify->post();
+ }
+}
+
+bool NuPlayer::RTSPSource::stopBufferingIfNecessary() {
+ Mutex::Autolock _l(mBufferingLock);
+
+ if (mBuffering) {
+ if (!haveSufficientDataOnAllTracks()) {
+ return false;
+ }
+
+ mBuffering = false;
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatBufferingEnd);
+ notify->post();
+ }
+
+ return true;
+}
+
+
} // namespace android
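Editor's note: the RTSPSource hunks above fold the duplicated kWhatBufferingStart/End notifications into startBufferingIfNecessary()/stopBufferingIfNecessary(), guarded by the new mBufferingLock so the notifications fire only on actual state transitions. A minimal sketch of that idempotent pattern follows, using std::mutex and a callback as stand-ins for the framework's Mutex and posted AMessage (all names here are illustrative):

    #include <functional>
    #include <mutex>
    #include <utility>

    class BufferingState {
    public:
        explicit BufferingState(std::function<void(bool /*started*/)> notify)
            : mNotify(std::move(notify)) {}

        void startIfNecessary() {
            std::lock_guard<std::mutex> lock(mLock);
            if (!mBuffering) {            // notify only on the false -> true edge
                mBuffering = true;
                mNotify(true);
            }
        }

        // Returns true once enough data is buffered (or we never were buffering).
        bool stopIfNecessary(bool haveSufficientData) {
            std::lock_guard<std::mutex> lock(mLock);
            if (mBuffering) {
                if (!haveSufficientData) {
                    return false;         // caller keeps reporting -EWOULDBLOCK
                }
                mBuffering = false;
                mNotify(false);
            }
            return true;
        }

    private:
        std::mutex mLock;
        bool mBuffering = false;
        std::function<void(bool)> mNotify;  // real code posts a message instead
    };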
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index f1cae53..ac3299a 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -97,6 +97,7 @@ private:
State mState;
status_t mFinalResult;
uint32_t mDisconnectReplyID;
+ Mutex mBufferingLock;
bool mBuffering;
sp<ALooper> mLooper;
@@ -126,6 +127,9 @@ private:
bool haveSufficientDataOnAllTracks();
void setEOSTimeout(bool audio, int64_t timeout);
+ void setError(status_t err);
+ void startBufferingIfNecessary();
+ bool stopBufferingIfNecessary();
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index 2e9a29a..b3f224d 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -37,13 +37,26 @@ NuPlayer::StreamingSource::StreamingSource(
const sp<IStreamSource> &source)
: Source(notify),
mSource(source),
- mFinalResult(OK) {
+ mFinalResult(OK),
+ mBuffering(false) {
}
NuPlayer::StreamingSource::~StreamingSource() {
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ }
}
void NuPlayer::StreamingSource::prepareAsync() {
+ if (mLooper == NULL) {
+ mLooper = new ALooper;
+ mLooper->setName("streaming");
+ mLooper->start();
+
+ mLooper->registerHandler(this);
+ }
+
notifyVideoSizeChanged();
notifyFlagsChanged(0);
notifyPrepared();
@@ -62,13 +75,15 @@ void NuPlayer::StreamingSource::start() {
mTSParser = new ATSParser(parserFlags);
mStreamListener->start();
+
+ postReadBuffer();
}
status_t NuPlayer::StreamingSource::feedMoreTSData() {
- if (mFinalResult != OK) {
- return mFinalResult;
- }
+ return postReadBuffer();
+}
+void NuPlayer::StreamingSource::onReadBuffer() {
for (int32_t i = 0; i < 50; ++i) {
char buffer[188];
sp<AMessage> extra;
@@ -77,10 +92,10 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
if (n == 0) {
ALOGI("input data EOS reached.");
mTSParser->signalEOS(ERROR_END_OF_STREAM);
- mFinalResult = ERROR_END_OF_STREAM;
+ setError(ERROR_END_OF_STREAM);
break;
} else if (n == INFO_DISCONTINUITY) {
- int32_t type = ATSParser::DISCONTINUITY_SEEK;
+ int32_t type = ATSParser::DISCONTINUITY_TIME;
int32_t mask;
if (extra != NULL
@@ -88,7 +103,8 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
IStreamListener::kKeyDiscontinuityMask, &mask)) {
if (mask == 0) {
ALOGE("Client specified an illegal discontinuity type.");
- return ERROR_UNSUPPORTED;
+ setError(ERROR_UNSUPPORTED);
+ break;
}
type = mask;
@@ -97,7 +113,6 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
mTSParser->signalDiscontinuity(
(ATSParser::DiscontinuityType)type, extra);
} else if (n < 0) {
- CHECK_EQ(n, -EWOULDBLOCK);
break;
} else {
if (buffer[0] == 0x00) {
@@ -118,7 +133,7 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
mTSParser->signalDiscontinuity(
((type & 1) == 0)
- ? ATSParser::DISCONTINUITY_SEEK
+ ? ATSParser::DISCONTINUITY_TIME
: ATSParser::DISCONTINUITY_FORMATCHANGE,
extra);
} else {
@@ -128,22 +143,80 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
ALOGE("TS Parser returned error %d", err);
mTSParser->signalEOS(err);
- mFinalResult = err;
+ setError(err);
break;
}
}
}
}
+}
+
+status_t NuPlayer::StreamingSource::postReadBuffer() {
+ {
+ Mutex::Autolock _l(mBufferingLock);
+ if (mFinalResult != OK) {
+ return mFinalResult;
+ }
+ if (mBuffering) {
+ return OK;
+ }
+ mBuffering = true;
+ }
+ (new AMessage(kWhatReadBuffer, id()))->post();
return OK;
}
-sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
- ATSParser::SourceType type =
- audio ? ATSParser::AUDIO : ATSParser::VIDEO;
+bool NuPlayer::StreamingSource::haveSufficientDataOnAllTracks() {
+ // We're going to buffer at least 2 secs worth data on all tracks before
+ // starting playback (both at startup and after a seek).
- sp<AnotherPacketSource> source =
- static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+ static const int64_t kMinDurationUs = 2000000ll;
+
+ sp<AnotherPacketSource> audioTrack = getSource(true /*audio*/);
+ sp<AnotherPacketSource> videoTrack = getSource(false /*audio*/);
+
+ status_t err;
+ int64_t durationUs;
+ if (audioTrack != NULL
+ && (durationUs = audioTrack->getBufferedDurationUs(&err))
+ < kMinDurationUs
+ && err == OK) {
+ ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+ durationUs / 1E6);
+ return false;
+ }
+
+ if (videoTrack != NULL
+ && (durationUs = videoTrack->getBufferedDurationUs(&err))
+ < kMinDurationUs
+ && err == OK) {
+ ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+ durationUs / 1E6);
+ return false;
+ }
+
+ return true;
+}
+
+void NuPlayer::StreamingSource::setError(status_t err) {
+ Mutex::Autolock _l(mBufferingLock);
+ mFinalResult = err;
+}
+
+sp<AnotherPacketSource> NuPlayer::StreamingSource::getSource(bool audio) {
+ if (mTSParser == NULL) {
+ return NULL;
+ }
+
+ sp<MediaSource> source = mTSParser->getSource(
+ audio ? ATSParser::AUDIO : ATSParser::VIDEO);
+
+ return static_cast<AnotherPacketSource *>(source.get());
+}
+
+sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
if (source == NULL) {
return NULL;
@@ -154,16 +227,16 @@ sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
status_t NuPlayer::StreamingSource::dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) {
- ATSParser::SourceType type =
- audio ? ATSParser::AUDIO : ATSParser::VIDEO;
-
- sp<AnotherPacketSource> source =
- static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+ sp<AnotherPacketSource> source = getSource(audio);
if (source == NULL) {
return -EWOULDBLOCK;
}
+ if (!haveSufficientDataOnAllTracks()) {
+ postReadBuffer();
+ }
+
status_t finalResult;
if (!source->hasBufferAvailable(&finalResult)) {
return finalResult == OK ? -EWOULDBLOCK : finalResult;
@@ -186,5 +259,26 @@ bool NuPlayer::StreamingSource::isRealTime() const {
return mSource->flags() & IStreamSource::kFlagIsRealTimeData;
}
+void NuPlayer::StreamingSource::onMessageReceived(
+ const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatReadBuffer:
+ {
+ onReadBuffer();
+
+ {
+ Mutex::Autolock _l(mBufferingLock);
+ mBuffering = false;
+ }
+ break;
+ }
+ default:
+ {
+ TRESPASS();
+ }
+ }
+}
+
+
} // namespace android
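Editor's note: the StreamingSource changes above move the blocking TS-feeding loop onto a dedicated looper: feedMoreTSData() now only posts a kWhatReadBuffer message, and the mBuffering flag keeps at most one read in flight while errors stay sticky in mFinalResult. A rough sketch of that post-once pattern with standard-library stand-ins (the callable stands in for posting an AMessage; names are illustrative):

    #include <functional>
    #include <mutex>
    #include <utility>

    class ReadScheduler {
    public:
        explicit ReadScheduler(std::function<void()> postToLooper)
            : mPost(std::move(postToLooper)) {}

        int postReadBuffer() {
            {
                std::lock_guard<std::mutex> lock(mLock);
                if (mFinalResult != 0) return mFinalResult;  // sticky error / EOS
                if (mReadPending) return 0;                  // a read is already queued
                mReadPending = true;
            }
            mPost();                                         // runs onReadBuffer() later
            return 0;
        }

        void onReadBufferDone() {                            // called on the looper thread
            std::lock_guard<std::mutex> lock(mLock);
            mReadPending = false;
        }

        void setError(int err) {
            std::lock_guard<std::mutex> lock(mLock);
            mFinalResult = err;
        }

    private:
        std::mutex mLock;
        bool mReadPending = false;
        int mFinalResult = 0;
        std::function<void()> mPost;
    };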
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
index 412b6c4..1f95f3c 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.h
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -25,6 +25,7 @@ namespace android {
struct ABuffer;
struct ATSParser;
+struct AnotherPacketSource;
struct NuPlayer::StreamingSource : public NuPlayer::Source {
StreamingSource(
@@ -43,14 +44,29 @@ struct NuPlayer::StreamingSource : public NuPlayer::Source {
protected:
virtual ~StreamingSource();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
virtual sp<MetaData> getFormatMeta(bool audio);
private:
+ enum {
+ kWhatReadBuffer,
+ };
sp<IStreamSource> mSource;
status_t mFinalResult;
sp<NuPlayerStreamListener> mStreamListener;
sp<ATSParser> mTSParser;
+ bool mBuffering;
+ Mutex mBufferingLock;
+ sp<ALooper> mLooper;
+
+ void setError(status_t err);
+ sp<AnotherPacketSource> getSource(bool audio);
+ bool haveSufficientDataOnAllTracks();
+ status_t postReadBuffer();
+ void onReadBuffer();
+
DISALLOW_EVIL_CONSTRUCTORS(StreamingSource);
};
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index e9d10cd..653d16c 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -498,6 +498,10 @@ void ACodec::initiateShutdown(bool keepComponentAllocated) {
sp<AMessage> msg = new AMessage(kWhatShutdown, id());
msg->setInt32("keepComponentAllocated", keepComponentAllocated);
msg->post();
+ if (!keepComponentAllocated) {
+ // ensure shutdown completes in 3 seconds
+ (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000);
+ }
}
void ACodec::signalRequestIDRFrame() {
@@ -1153,7 +1157,7 @@ status_t ACodec::configureCodec(
}
sp<AMessage> inputFormat = new AMessage();
- sp<AMessage> outputFormat = new AMessage();
+ sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged
mIsEncoder = encoder;
@@ -1263,6 +1267,24 @@ status_t ACodec::configureCodec(
static_cast<NativeWindowWrapper *>(obj.get()));
sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
+ // START of temporary support for automatic FRC - THIS WILL BE REMOVED
+ int32_t autoFrc;
+ if (msg->findInt32("auto-frc", &autoFrc)) {
+ bool enabled = autoFrc;
+ OMX_CONFIG_BOOLEANTYPE config;
+ InitOMXParams(&config);
+ config.bEnabled = (OMX_BOOL)enabled;
+ status_t temp = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
+ &config, sizeof(config));
+ if (temp == OK) {
+ outputFormat->setInt32("auto-frc", enabled);
+ } else if (enabled) {
+ ALOGI("codec does not support requested auto-frc (err %d)", temp);
+ }
+ }
+ // END of temporary support for automatic FRC
+
int32_t tunneled;
if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
tunneled != 0) {
@@ -1520,6 +1542,8 @@ status_t ACodec::configureCodec(
err = setMinBufferSize(kPortIndexInput, 8192); // XXX
}
+ mBaseOutputFormat = outputFormat;
+
CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
mInputFormat = inputFormat;
@@ -2216,7 +2240,11 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
- video_def->eColorFormat = colorFormat;
+ // this is redundant as it was already set up in setVideoPortFormatType
+ // FIXME for now skip this only for flexible YUV formats
+ if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
+ video_def->eColorFormat = colorFormat;
+ }
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
@@ -3505,7 +3533,7 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
}
void ACodec::sendFormatChange(const sp<AMessage> &reply) {
- sp<AMessage> notify = mNotify->dup();
+ sp<AMessage> notify = mBaseOutputFormat->dup();
notify->setInt32("what", kWhatOutputFormatChanged);
CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK);
@@ -3794,6 +3822,19 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ ALOGI("[%s] forcing the release of codec",
+ mCodec->mComponentName.c_str());
+ status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
+ ALOGE_IF("[%s] failed to release codec instance: err=%d",
+ mCodec->mComponentName.c_str(), err);
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+ break;
+ }
+
default:
return false;
}
@@ -4196,7 +4237,8 @@ bool ACodec::BaseState::onOMXFillBufferDone(
case RESUBMIT_BUFFERS:
{
- if (rangeLength == 0 && !(flags & OMX_BUFFERFLAG_EOS)) {
+ if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
+ || mCodec->mPortEOS[kPortIndexOutput])) {
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
@@ -4453,6 +4495,13 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ // nothing to do, as we have already signaled shutdown
+ handled = true;
+ break;
+ }
+
default:
return BaseState::onMessageReceived(msg);
}
@@ -4599,6 +4648,7 @@ void ACodec::LoadedState::stateEntered() {
mCodec->mRepeatFrameDelayUs = -1ll;
mCodec->mInputFormat.clear();
mCodec->mOutputFormat.clear();
+ mCodec->mBaseOutputFormat.clear();
if (mCodec->mShutdownInProgress) {
bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
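Editor's note: initiateShutdown() above arms a 3-second watchdog by posting kWhatReleaseCodecInstance with a delay; if shutdown never completes, the handler frees the OMX node and reports completion anyway. A small sketch of the same timeout idea using a timed condition-variable wait instead of a delayed message (illustrative only; the real code never blocks a thread like this):

    #include <chrono>
    #include <condition_variable>
    #include <functional>
    #include <mutex>

    class ShutdownWatchdog {
    public:
        // Wait for shutdown to complete; force the release after 3 seconds.
        void armAndWait(std::function<void()> forceRelease) {
            std::unique_lock<std::mutex> lock(mLock);
            if (!mCv.wait_for(lock, std::chrono::seconds(3), [this] { return mDone; })) {
                forceRelease();   // e.g. free the codec node and signal shutdown-complete
            }
        }

        void notifyShutdownComplete() {
            { std::lock_guard<std::mutex> lock(mLock); mDone = true; }
            mCv.notify_all();
        }

    private:
        std::mutex mLock;
        std::condition_variable mCv;
        bool mDone = false;
    };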
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index ab8ac79..007c090 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -75,7 +75,7 @@ static const size_t kHighWaterMarkBytes = 200000;
// maximum time in paused state when offloading audio decompression. When elapsed, the AudioPlayer
// is destroyed to allow the audio DSP to power down.
-static int64_t kOffloadPauseMaxUs = 60000000ll;
+static int64_t kOffloadPauseMaxUs = 10000000ll;
struct AwesomeEvent : public TimedEventQueue::Event {
@@ -878,6 +878,16 @@ void AwesomePlayer::onStreamDone() {
return;
}
+ if (mFlags & AUTO_LOOPING) {
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+ if (streamType == AUDIO_STREAM_NOTIFICATION) {
+ ALOGW("disabling auto-loop for notification");
+ modifyFlags(AUTO_LOOPING, CLEAR);
+ }
+ }
if ((mFlags & LOOPING)
|| ((mFlags & AUTO_LOOPING)
&& (mAudioSink == NULL || mAudioSink->realtime()))) {
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 853e8fc..ad12bdd 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -130,6 +130,7 @@ static int32_t getColorFormat(const char* colorFormat) {
"CameraSource::getColorFormat", colorFormat);
CHECK(!"Unknown color format");
+ return -1;
}
CameraSource *CameraSource::Create(const String16 &clientName) {
@@ -677,63 +678,80 @@ void CameraSource::stopCameraRecording() {
void CameraSource::releaseCamera() {
ALOGV("releaseCamera");
- if (mCamera != 0) {
+ sp<Camera> camera;
+ bool coldCamera = false;
+ {
+ Mutex::Autolock autoLock(mLock);
+ // get a local ref and clear ref to mCamera now
+ camera = mCamera;
+ mCamera.clear();
+ coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
+ }
+
+ if (camera != 0) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ if (coldCamera) {
ALOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
- mCamera->disconnect();
+ camera->stopPreview();
+ camera->disconnect();
}
- mCamera->unlock();
- mCamera.clear();
- mCamera = 0;
+ camera->unlock();
IPCThreadState::self()->restoreCallingIdentity(token);
}
- if (mCameraRecordingProxy != 0) {
- IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
- mCameraRecordingProxy.clear();
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mCameraRecordingProxy != 0) {
+ IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
+ mCameraRecordingProxy.clear();
+ }
+ mCameraFlags = 0;
}
- mCameraFlags = 0;
}
status_t CameraSource::reset() {
ALOGD("reset: E");
- Mutex::Autolock autoLock(mLock);
- mStarted = false;
- mFrameAvailableCondition.signal();
- int64_t token;
- bool isTokenValid = false;
- if (mCamera != 0) {
- token = IPCThreadState::self()->clearCallingIdentity();
- isTokenValid = true;
- }
- releaseQueuedFrames();
- while (!mFramesBeingEncoded.empty()) {
- if (NO_ERROR !=
- mFrameCompleteCondition.waitRelative(mLock,
- mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
- ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
- mFramesBeingEncoded.size());
+ {
+ Mutex::Autolock autoLock(mLock);
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
+ int64_t token;
+ bool isTokenValid = false;
+ if (mCamera != 0) {
+ token = IPCThreadState::self()->clearCallingIdentity();
+ isTokenValid = true;
+ }
+ releaseQueuedFrames();
+ while (!mFramesBeingEncoded.empty()) {
+ if (NO_ERROR !=
+ mFrameCompleteCondition.waitRelative(mLock,
+ mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+ ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
+ mFramesBeingEncoded.size());
+ }
+ }
+ stopCameraRecording();
+ if (isTokenValid) {
+ IPCThreadState::self()->restoreCallingIdentity(token);
}
- }
- stopCameraRecording();
- releaseCamera();
- if (isTokenValid) {
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
- if (mCollectStats) {
- ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
- mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
- mLastFrameTimestampUs - mFirstFrameTimeUs);
- }
+ if (mCollectStats) {
+ ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
+ mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
+ mLastFrameTimestampUs - mFirstFrameTimeUs);
+ }
- if (mNumGlitches > 0) {
- ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ if (mNumGlitches > 0) {
+ ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ }
+
+ CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
}
- CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+ releaseCamera();
+
ALOGD("reset: X");
return OK;
}
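Editor's note: releaseCamera() above now takes a local strong reference and clears mCamera while holding mLock, then makes the potentially blocking binder calls with the lock dropped, so mLock is never held across IPC. A compact sketch of that discipline with std::shared_ptr standing in for sp<> (types and names are illustrative):

    #include <memory>
    #include <mutex>

    struct Camera {
        void stopPreview() {}
        void disconnect() {}
        void unlock() {}
    };

    class Source {
    public:
        void releaseCamera() {
            std::shared_ptr<Camera> camera;
            bool cold = false;
            {
                std::lock_guard<std::mutex> lock(mLock);
                camera = mCamera;          // local ref keeps the object alive
                mCamera.reset();           // member cleared while still locked
                cold = !mHotCamera;
            }
            if (camera) {                  // IPC-style calls happen outside mLock
                if (cold) {
                    camera->stopPreview();
                    camera->disconnect();
                }
                camera->unlock();
            }
        }

    private:
        std::mutex mLock;
        std::shared_ptr<Camera> mCamera = std::make_shared<Camera>();
        bool mHotCamera = false;
    };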
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index f6525ae..9f20b1d 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -41,6 +41,11 @@
#include "include/ESDS.h"
+
+#ifndef __predict_false
+#define __predict_false(exp) __builtin_expect((exp) != 0, 0)
+#endif
+
#define WARN_UNLESS(condition, message, ...) \
( (__predict_false(condition)) ? false : ({ \
ALOGW("Condition %s failed " message, #condition, ##__VA_ARGS__); \
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5f55484..e1c8a41 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -738,6 +738,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
err, actionCode, mState);
if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
+ mFlags &= ~kFlagIsComponentAllocated;
}
bool sendErrorResponse = true;
@@ -863,6 +864,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
{
CHECK_EQ(mState, INITIALIZING);
setState(INITIALIZED);
+ mFlags |= kFlagIsComponentAllocated;
CHECK(msg->findString("componentName", &mComponentName));
@@ -1009,6 +1011,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
mFlags |= kFlagOutputFormatChanged;
postActivityNotificationIfPossible();
}
+
+ // Notify mCrypto of video resolution changes
+ if (mCrypto != NULL) {
+ int32_t height, width;
+ if (mOutputFormat->findInt32("height", &height) &&
+ mOutputFormat->findInt32("width", &width)) {
+ mCrypto->notifyResolution(width, height);
+ }
+ }
+
break;
}
@@ -1136,6 +1148,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
setState(UNINITIALIZED);
mComponentName.clear();
}
+ mFlags &= ~kFlagIsComponentAllocated;
(new AMessage)->postReply(mReplyID);
break;
@@ -1313,8 +1326,10 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState == FLUSHED) {
+ setState(STARTED);
mCodec->signalResume();
PostReplyWithError(replyID, OK);
+ break;
} else if (mState != CONFIGURED) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
@@ -1336,9 +1351,13 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != INITIALIZED
+ if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
+ && mState != INITIALIZED
&& mState != CONFIGURED && !isExecuting()) {
- // We may be in "UNINITIALIZED" state already without the
+ // 1) Permit release to shut down the component if allocated.
+ //
+ // 2) We may be in "UNINITIALIZED" state already and
+ // also shutdown the encoder/decoder without the
// client being aware of this if media server died while
// we were being stopped. The client would assume that
// after stop() returned, it would be safe to call release()
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index b9466da..c26e909 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -422,19 +422,11 @@ status_t MediaCodecSource::initEncoder() {
}
}
- err = mEncoder->start();
-
- if (err != OK) {
- return err;
- }
-
- err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
+ mEncoderActivityNotify = new AMessage(
+ kWhatEncoderActivity, mReflector->id());
+ mEncoder->setCallback(mEncoderActivityNotify);
- if (err != OK) {
- return err;
- }
-
- err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+ err = mEncoder->start();
if (err != OK) {
return err;
@@ -461,14 +453,6 @@ void MediaCodecSource::releaseEncoder() {
mbuf->release();
}
}
-
- for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
- sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
- accessUnit->setMediaBufferBase(NULL);
- }
-
- mEncoderInputBuffers.clear();
- mEncoderOutputBuffers.clear();
}
status_t MediaCodecSource::postSynchronouslyAndReturnError(
@@ -539,20 +523,6 @@ void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
}
}
-void MediaCodecSource::scheduleDoMoreWork() {
- if (mDoMoreWorkPending) {
- return;
- }
-
- mDoMoreWorkPending = true;
-
- if (mEncoderActivityNotify == NULL) {
- mEncoderActivityNotify = new AMessage(
- kWhatEncoderActivity, mReflector->id());
- }
- mEncoder->requestActivityNotification(mEncoderActivityNotify);
-}
-
status_t MediaCodecSource::feedEncoderInputBuffers() {
while (!mInputBufferQueue.empty()
&& !mAvailEncoderInputIndices.empty()) {
@@ -587,16 +557,22 @@ status_t MediaCodecSource::feedEncoderInputBuffers() {
#endif // DEBUG_DRIFT_TIME
}
+ sp<ABuffer> inbuf;
+ status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
+ if (err != OK || inbuf == NULL) {
+ mbuf->release();
+ signalEOS();
+ break;
+ }
+
size = mbuf->size();
- memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
- mbuf->data(), size);
+ memcpy(inbuf->data(), mbuf->data(), size);
if (mIsVideo) {
// video encoder will release MediaBuffer when done
// with underlying data.
- mEncoderInputBuffers.itemAt(bufferIndex)->setMediaBufferBase(
- mbuf);
+ inbuf->setMediaBufferBase(mbuf);
} else {
mbuf->release();
}
@@ -615,112 +591,6 @@ status_t MediaCodecSource::feedEncoderInputBuffers() {
return OK;
}
-status_t MediaCodecSource::doMoreWork(int32_t numInput, int32_t numOutput) {
- status_t err = OK;
-
- if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
- while (numInput-- > 0) {
- size_t bufferIndex;
- err = mEncoder->dequeueInputBuffer(&bufferIndex);
-
- if (err != OK) {
- break;
- }
-
- mAvailEncoderInputIndices.push_back(bufferIndex);
- }
-
- feedEncoderInputBuffers();
- }
-
- while (numOutput-- > 0) {
- size_t bufferIndex;
- size_t offset;
- size_t size;
- int64_t timeUs;
- uint32_t flags;
- err = mEncoder->dequeueOutputBuffer(
- &bufferIndex, &offset, &size, &timeUs, &flags);
-
- if (err != OK) {
- if (err == INFO_FORMAT_CHANGED) {
- continue;
- } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
- mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
- continue;
- }
-
- if (err == -EAGAIN) {
- err = OK;
- }
- break;
- }
- if (!(flags & MediaCodec::BUFFER_FLAG_EOS)) {
- sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
-
- MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
- memcpy(mbuf->data(), outbuf->data(), outbuf->size());
-
- if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
- if (mIsVideo) {
- int64_t decodingTimeUs;
- if (mFlags & FLAG_USE_SURFACE_INPUT) {
- // GraphicBufferSource is supposed to discard samples
- // queued before start, and offset timeUs by start time
- CHECK_GE(timeUs, 0ll);
- // TODO:
- // Decoding time for surface source is unavailable,
- // use presentation time for now. May need to move
- // this logic into MediaCodec.
- decodingTimeUs = timeUs;
- } else {
- CHECK(!mDecodingTimeQueue.empty());
- decodingTimeUs = *(mDecodingTimeQueue.begin());
- mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
- }
- mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
-
- ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
- timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
- } else {
- int64_t driftTimeUs = 0;
-#if DEBUG_DRIFT_TIME
- CHECK(!mDriftTimeQueue.empty());
- driftTimeUs = *(mDriftTimeQueue.begin());
- mDriftTimeQueue.erase(mDriftTimeQueue.begin());
- mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
-#endif // DEBUG_DRIFT_TIME
- ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
- timeUs, timeUs / 1E6, driftTimeUs);
- }
- mbuf->meta_data()->setInt64(kKeyTime, timeUs);
- } else {
- mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
- }
- if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
- mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
- }
- mbuf->setObserver(this);
- mbuf->add_ref();
-
- {
- Mutex::Autolock autoLock(mOutputBufferLock);
- mOutputBufferQueue.push_back(mbuf);
- mOutputBufferCond.signal();
- }
- }
-
- mEncoder->releaseOutputBuffer(bufferIndex);
-
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- err = ERROR_END_OF_STREAM;
- break;
- }
- }
-
- return err;
-}
-
status_t MediaCodecSource::onStart(MetaData *params) {
if (mStopping) {
ALOGE("Failed to start while we're stopping");
@@ -748,7 +618,6 @@ status_t MediaCodecSource::onStart(MetaData *params) {
startTimeUs = -1ll;
}
resume(startTimeUs);
- scheduleDoMoreWork();
} else {
CHECK(mPuller != NULL);
sp<AMessage> notify = new AMessage(
@@ -792,37 +661,110 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
mInputBufferQueue.push_back(mbuf);
feedEncoderInputBuffers();
- scheduleDoMoreWork();
break;
}
case kWhatEncoderActivity:
{
- mDoMoreWorkPending = false;
-
if (mEncoder == NULL) {
break;
}
- int32_t numInput, numOutput;
+ int32_t cbID;
+ CHECK(msg->findInt32("callbackID", &cbID));
+ if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
+ int32_t index;
+ CHECK(msg->findInt32("index", &index));
+
+ mAvailEncoderInputIndices.push_back(index);
+ feedEncoderInputBuffers();
+ } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
+ int32_t index;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ int32_t flags;
+ native_handle_t* handle = NULL;
+
+ CHECK(msg->findInt32("index", &index));
+ CHECK(msg->findSize("offset", &offset));
+ CHECK(msg->findSize("size", &size));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+ CHECK(msg->findInt32("flags", &flags));
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ mEncoder->releaseOutputBuffer(index);
+ signalEOS();
+ break;
+ }
- if (!msg->findInt32("input-buffers", &numInput)) {
- numInput = INT32_MAX;
- }
- if (!msg->findInt32("output-buffers", &numOutput)) {
- numOutput = INT32_MAX;
- }
+ sp<ABuffer> outbuf;
+ status_t err = mEncoder->getOutputBuffer(index, &outbuf);
+ if (err != OK || outbuf == NULL) {
+ signalEOS();
+ break;
+ }
- status_t err = doMoreWork(numInput, numOutput);
+ MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
+ memcpy(mbuf->data(), outbuf->data(), outbuf->size());
- if (err == OK) {
- scheduleDoMoreWork();
- } else {
- // reached EOS, or error
- signalEOS(err);
- }
+ if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
+ if (mIsVideo) {
+ int64_t decodingTimeUs;
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ // GraphicBufferSource is supposed to discard samples
+ // queued before start, and offset timeUs by start time
+ CHECK_GE(timeUs, 0ll);
+ // TODO:
+ // Decoding time for surface source is unavailable,
+ // use presentation time for now. May need to move
+ // this logic into MediaCodec.
+ decodingTimeUs = timeUs;
+ } else {
+ CHECK(!mDecodingTimeQueue.empty());
+ decodingTimeUs = *(mDecodingTimeQueue.begin());
+ mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
+ }
+ mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
- break;
+ ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
+ timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
+ } else {
+ int64_t driftTimeUs = 0;
+#if DEBUG_DRIFT_TIME
+ CHECK(!mDriftTimeQueue.empty());
+ driftTimeUs = *(mDriftTimeQueue.begin());
+ mDriftTimeQueue.erase(mDriftTimeQueue.begin());
+ mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+ ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
+ timeUs, timeUs / 1E6, driftTimeUs);
+ }
+ mbuf->meta_data()->setInt64(kKeyTime, timeUs);
+ } else {
+ mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
+ }
+ if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
+ mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
+ }
+ mbuf->setObserver(this);
+ mbuf->add_ref();
+
+ {
+ Mutex::Autolock autoLock(mOutputBufferLock);
+ mOutputBufferQueue.push_back(mbuf);
+ mOutputBufferCond.signal();
+ }
+
+ mEncoder->releaseOutputBuffer(index);
+ } else if (cbID == MediaCodec::CB_ERROR) {
+ status_t err;
+ CHECK(msg->findInt32("err", &err));
+ ALOGE("Encoder (%s) reported error : 0x%x",
+ mIsVideo ? "video" : "audio", err);
+ signalEOS();
+ }
+ break;
}
case kWhatStart:
{
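Editor's note: the MediaCodecSource rewrite above drops the doMoreWork() polling loop in favor of MediaCodec's callback mode: setCallback() delivers CB_INPUT_AVAILABLE / CB_OUTPUT_AVAILABLE / CB_ERROR events, free input indices are parked in mAvailEncoderInputIndices, and feedEncoderInputBuffers() pairs them with queued source buffers. A simplified sketch of that pairing logic with plain containers (everything here is a stand-in, not the framework API):

    #include <cstddef>
    #include <cstdint>
    #include <list>
    #include <utility>
    #include <vector>

    class CallbackEncoderFeeder {
    public:
        void onInputAvailable(size_t index) {        // CB_INPUT_AVAILABLE analogue
            mFreeInputIndices.push_back(index);
            feedPending();
        }

        void queueSourceBuffer(std::vector<uint8_t> data) {
            mPendingInput.push_back(std::move(data));
            feedPending();
        }

    private:
        void feedPending() {
            // Pair queued source data with free encoder input slots, in order.
            while (!mPendingInput.empty() && !mFreeInputIndices.empty()) {
                size_t index = mFreeInputIndices.front();
                mFreeInputIndices.pop_front();
                std::vector<uint8_t> buf = std::move(mPendingInput.front());
                mPendingInput.pop_front();
                submitToEncoder(index, buf);   // stands in for getInputBuffer + queue
            }
        }

        void submitToEncoder(size_t /*index*/, const std::vector<uint8_t> & /*buf*/) {}

        std::list<size_t> mFreeInputIndices;
        std::list<std::vector<uint8_t>> mPendingInput;
    };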
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 9c03c9a..69f4989 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -47,10 +47,11 @@
#include <media/stagefright/SkipCutBuffer.h>
#include <utils/Vector.h>
-#include <OMX_Audio.h>
#include <OMX_AudioExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
+#include <OMX_VideoExt.h>
+#include <OMX_AsString.h>
#include "include/avc_utils.h"
@@ -817,6 +818,7 @@ static size_t getFrameSize(
CHECK(!"Should not be here. Unsupported color format.");
break;
}
+ return 0;
}
status_t OMXCodec::findTargetColorFormat(
@@ -4075,220 +4077,6 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
CHECK(!"should not be here.");
}
-static const char *imageCompressionFormatString(OMX_IMAGE_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_IMAGE_CodingUnused",
- "OMX_IMAGE_CodingAutoDetect",
- "OMX_IMAGE_CodingJPEG",
- "OMX_IMAGE_CodingJPEG2K",
- "OMX_IMAGE_CodingEXIF",
- "OMX_IMAGE_CodingTIFF",
- "OMX_IMAGE_CodingGIF",
- "OMX_IMAGE_CodingPNG",
- "OMX_IMAGE_CodingLZW",
- "OMX_IMAGE_CodingBMP",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *colorFormatString(OMX_COLOR_FORMATTYPE type) {
- static const char *kNames[] = {
- "OMX_COLOR_FormatUnused",
- "OMX_COLOR_FormatMonochrome",
- "OMX_COLOR_Format8bitRGB332",
- "OMX_COLOR_Format12bitRGB444",
- "OMX_COLOR_Format16bitARGB4444",
- "OMX_COLOR_Format16bitARGB1555",
- "OMX_COLOR_Format16bitRGB565",
- "OMX_COLOR_Format16bitBGR565",
- "OMX_COLOR_Format18bitRGB666",
- "OMX_COLOR_Format18bitARGB1665",
- "OMX_COLOR_Format19bitARGB1666",
- "OMX_COLOR_Format24bitRGB888",
- "OMX_COLOR_Format24bitBGR888",
- "OMX_COLOR_Format24bitARGB1887",
- "OMX_COLOR_Format25bitARGB1888",
- "OMX_COLOR_Format32bitBGRA8888",
- "OMX_COLOR_Format32bitARGB8888",
- "OMX_COLOR_FormatYUV411Planar",
- "OMX_COLOR_FormatYUV411PackedPlanar",
- "OMX_COLOR_FormatYUV420Planar",
- "OMX_COLOR_FormatYUV420PackedPlanar",
- "OMX_COLOR_FormatYUV420SemiPlanar",
- "OMX_COLOR_FormatYUV422Planar",
- "OMX_COLOR_FormatYUV422PackedPlanar",
- "OMX_COLOR_FormatYUV422SemiPlanar",
- "OMX_COLOR_FormatYCbYCr",
- "OMX_COLOR_FormatYCrYCb",
- "OMX_COLOR_FormatCbYCrY",
- "OMX_COLOR_FormatCrYCbY",
- "OMX_COLOR_FormatYUV444Interleaved",
- "OMX_COLOR_FormatRawBayer8bit",
- "OMX_COLOR_FormatRawBayer10bit",
- "OMX_COLOR_FormatRawBayer8bitcompressed",
- "OMX_COLOR_FormatL2",
- "OMX_COLOR_FormatL4",
- "OMX_COLOR_FormatL8",
- "OMX_COLOR_FormatL16",
- "OMX_COLOR_FormatL24",
- "OMX_COLOR_FormatL32",
- "OMX_COLOR_FormatYUV420PackedSemiPlanar",
- "OMX_COLOR_FormatYUV422PackedSemiPlanar",
- "OMX_COLOR_Format18BitBGR666",
- "OMX_COLOR_Format24BitARGB6666",
- "OMX_COLOR_Format24BitABGR6666",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
- return "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar";
- } else if (type == OMX_QCOM_COLOR_FormatYVU420SemiPlanar) {
- return "OMX_QCOM_COLOR_FormatYVU420SemiPlanar";
- } else if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *videoCompressionFormatString(OMX_VIDEO_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_VIDEO_CodingUnused",
- "OMX_VIDEO_CodingAutoDetect",
- "OMX_VIDEO_CodingMPEG2",
- "OMX_VIDEO_CodingH263",
- "OMX_VIDEO_CodingMPEG4",
- "OMX_VIDEO_CodingWMV",
- "OMX_VIDEO_CodingRV",
- "OMX_VIDEO_CodingAVC",
- "OMX_VIDEO_CodingMJPEG",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *audioCodingTypeString(OMX_AUDIO_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_CodingUnused",
- "OMX_AUDIO_CodingAutoDetect",
- "OMX_AUDIO_CodingPCM",
- "OMX_AUDIO_CodingADPCM",
- "OMX_AUDIO_CodingAMR",
- "OMX_AUDIO_CodingGSMFR",
- "OMX_AUDIO_CodingGSMEFR",
- "OMX_AUDIO_CodingGSMHR",
- "OMX_AUDIO_CodingPDCFR",
- "OMX_AUDIO_CodingPDCEFR",
- "OMX_AUDIO_CodingPDCHR",
- "OMX_AUDIO_CodingTDMAFR",
- "OMX_AUDIO_CodingTDMAEFR",
- "OMX_AUDIO_CodingQCELP8",
- "OMX_AUDIO_CodingQCELP13",
- "OMX_AUDIO_CodingEVRC",
- "OMX_AUDIO_CodingSMV",
- "OMX_AUDIO_CodingG711",
- "OMX_AUDIO_CodingG723",
- "OMX_AUDIO_CodingG726",
- "OMX_AUDIO_CodingG729",
- "OMX_AUDIO_CodingAAC",
- "OMX_AUDIO_CodingMP3",
- "OMX_AUDIO_CodingSBC",
- "OMX_AUDIO_CodingVORBIS",
- "OMX_AUDIO_CodingOPUS",
- "OMX_AUDIO_CodingWMA",
- "OMX_AUDIO_CodingRA",
- "OMX_AUDIO_CodingMIDI",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *audioPCMModeString(OMX_AUDIO_PCMMODETYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_PCMModeLinear",
- "OMX_AUDIO_PCMModeALaw",
- "OMX_AUDIO_PCMModeMULaw",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *amrBandModeString(OMX_AUDIO_AMRBANDMODETYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_AMRBandModeUnused",
- "OMX_AUDIO_AMRBandModeNB0",
- "OMX_AUDIO_AMRBandModeNB1",
- "OMX_AUDIO_AMRBandModeNB2",
- "OMX_AUDIO_AMRBandModeNB3",
- "OMX_AUDIO_AMRBandModeNB4",
- "OMX_AUDIO_AMRBandModeNB5",
- "OMX_AUDIO_AMRBandModeNB6",
- "OMX_AUDIO_AMRBandModeNB7",
- "OMX_AUDIO_AMRBandModeWB0",
- "OMX_AUDIO_AMRBandModeWB1",
- "OMX_AUDIO_AMRBandModeWB2",
- "OMX_AUDIO_AMRBandModeWB3",
- "OMX_AUDIO_AMRBandModeWB4",
- "OMX_AUDIO_AMRBandModeWB5",
- "OMX_AUDIO_AMRBandModeWB6",
- "OMX_AUDIO_AMRBandModeWB7",
- "OMX_AUDIO_AMRBandModeWB8",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *amrFrameFormatString(OMX_AUDIO_AMRFRAMEFORMATTYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_AMRFrameFormatConformance",
- "OMX_AUDIO_AMRFrameFormatIF1",
- "OMX_AUDIO_AMRFrameFormatIF2",
- "OMX_AUDIO_AMRFrameFormatFSF",
- "OMX_AUDIO_AMRFrameFormatRTPPayload",
- "OMX_AUDIO_AMRFrameFormatITU",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -4319,10 +4107,10 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf(" nStride = %" PRIu32 "\n", imageDef->nStride);
printf(" eCompressionFormat = %s\n",
- imageCompressionFormatString(imageDef->eCompressionFormat));
+ asString(imageDef->eCompressionFormat));
printf(" eColorFormat = %s\n",
- colorFormatString(imageDef->eColorFormat));
+ asString(imageDef->eColorFormat));
break;
}
@@ -4338,10 +4126,10 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf(" nStride = %" PRIu32 "\n", videoDef->nStride);
printf(" eCompressionFormat = %s\n",
- videoCompressionFormatString(videoDef->eCompressionFormat));
+ asString(videoDef->eCompressionFormat));
printf(" eColorFormat = %s\n",
- colorFormatString(videoDef->eColorFormat));
+ asString(videoDef->eColorFormat));
break;
}
@@ -4353,7 +4141,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("\n");
printf(" // Audio\n");
printf(" eEncoding = %s\n",
- audioCodingTypeString(audioDef->eEncoding));
+ asString(audioDef->eEncoding));
if (audioDef->eEncoding == OMX_AUDIO_CodingPCM) {
OMX_AUDIO_PARAM_PCMMODETYPE params;
@@ -4373,7 +4161,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
params.eNumData == OMX_NumericalDataSigned
? "signed" : "unsigned");
- printf(" ePCMMode = %s\n", audioPCMModeString(params.ePCMMode));
+ printf(" ePCMMode = %s\n", asString(params.ePCMMode));
} else if (audioDef->eEncoding == OMX_AUDIO_CodingAMR) {
OMX_AUDIO_PARAM_AMRTYPE amr;
InitOMXParams(&amr);
@@ -4385,9 +4173,9 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf(" nChannels = %" PRIu32 "\n", amr.nChannels);
printf(" eAMRBandMode = %s\n",
- amrBandModeString(amr.eAMRBandMode));
+ asString(amr.eAMRBandMode));
printf(" eAMRFrameFormat = %s\n",
- amrFrameFormatString(amr.eAMRFrameFormat));
+ asString(amr.eAMRFrameFormat));
}
break;
@@ -4696,12 +4484,7 @@ status_t QueryCodec(
const char *componentName, const char *mime,
bool isEncoder,
CodecCapabilities *caps) {
- if (strncmp(componentName, "OMX.", 4)) {
- // Not an OpenMax component but a software codec.
- caps->mFlags = 0;
- caps->mComponentName = componentName;
- return OK;
- }
+ bool isVideo = !strncasecmp(mime, "video/", 6);
sp<OMXCodecObserver> observer = new OMXCodecObserver;
IOMX::node_id node;
@@ -4716,59 +4499,62 @@ status_t QueryCodec(
caps->mFlags = 0;
caps->mComponentName = componentName;
- OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
- InitOMXParams(&param);
+ // NOTE: OMX does not provide a way to query AAC profile support
+ if (isVideo) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
+ InitOMXParams(&param);
- param.nPortIndex = !isEncoder ? 0 : 1;
+ param.nPortIndex = !isEncoder ? 0 : 1;
- for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
- err = omx->getParameter(
- node, OMX_IndexParamVideoProfileLevelQuerySupported,
- &param, sizeof(param));
-
- if (err != OK) {
- break;
- }
+ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoProfileLevelQuerySupported,
+ &param, sizeof(param));
- CodecProfileLevel profileLevel;
- profileLevel.mProfile = param.eProfile;
- profileLevel.mLevel = param.eLevel;
+ if (err != OK) {
+ break;
+ }
- caps->mProfileLevels.push(profileLevel);
- }
+ CodecProfileLevel profileLevel;
+ profileLevel.mProfile = param.eProfile;
+ profileLevel.mLevel = param.eLevel;
- // Color format query
- // return colors in the order reported by the OMX component
- // prefix "flexible" standard ones with the flexible equivalent
- OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
- InitOMXParams(&portFormat);
- portFormat.nPortIndex = !isEncoder ? 1 : 0;
- for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
- err = omx->getParameter(
- node, OMX_IndexParamVideoPortFormat,
- &portFormat, sizeof(portFormat));
- if (err != OK) {
- break;
+ caps->mProfileLevels.push(profileLevel);
}
- OMX_U32 flexibleEquivalent;
- if (ACodec::isFlexibleColorFormat(
- omx, node, portFormat.eColorFormat, &flexibleEquivalent)) {
- bool marked = false;
- for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
- if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
- marked = true;
- break;
- }
+ // Color format query
+ // return colors in the order reported by the OMX component
+ // prefix "flexible" standard ones with the flexible equivalent
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ InitOMXParams(&portFormat);
+ portFormat.nPortIndex = !isEncoder ? 1 : 0;
+ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoPortFormat,
+ &portFormat, sizeof(portFormat));
+ if (err != OK) {
+ break;
}
- if (!marked) {
- caps->mColorFormats.push(flexibleEquivalent);
+
+ OMX_U32 flexibleEquivalent;
+ if (ACodec::isFlexibleColorFormat(
+ omx, node, portFormat.eColorFormat, &flexibleEquivalent)) {
+ bool marked = false;
+ for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
+ if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
+ marked = true;
+ break;
+ }
+ }
+ if (!marked) {
+ caps->mColorFormats.push(flexibleEquivalent);
+ }
}
+ caps->mColorFormats.push(portFormat.eColorFormat);
}
- caps->mColorFormats.push(portFormat.eColorFormat);
}
- if (!isEncoder && !strncmp(mime, "video/", 6)) {
+ if (isVideo && !isEncoder) {
if (omx->storeMetaDataInBuffers(
node, 1 /* port index */, OMX_TRUE) == OK ||
omx->prepareForAdaptivePlayback(
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index aeb8dd7..bd28583 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -38,6 +38,7 @@ extern "C" {
int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb);
int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb);
int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb);
+ long vorbis_packet_blocksize(vorbis_info *vi,ogg_packet *op);
}
namespace android {
@@ -84,6 +85,8 @@ struct MyVorbisExtractor {
private:
struct Page {
uint64_t mGranulePosition;
+ int32_t mPrevPacketSize;
+ uint64_t mPrevPacketPos;
uint32_t mSerialNo;
uint32_t mPageNo;
uint8_t mFlags;
@@ -121,6 +124,8 @@ private:
status_t verifyHeader(
MediaBuffer *buffer, uint8_t type);
+ int32_t packetBlockSize(MediaBuffer *buffer);
+
void parseFileMetaData();
status_t findPrevGranulePosition(off64_t pageOffset, uint64_t *granulePos);
@@ -373,6 +378,7 @@ status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
mFirstPacketInPage = true;
mCurrentPageSamples = 0;
mCurrentPage.mNumSegments = 0;
+ mCurrentPage.mPrevPacketSize = -1;
mNextLaceIndex = 0;
// XXX what if new page continues packet from last???
@@ -489,16 +495,6 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
tmp->set_range(0, buffer->range_length());
buffer->release();
} else {
- // XXX Not only is this not technically the correct time for
- // this packet, we also stamp every packet in this page
- // with the same time. This needs fixing later.
-
- if (mVi.rate) {
- // Rate may not have been initialized yet if we're currently
- // reading the configuration packets...
- // Fortunately, the timestamp doesn't matter for those.
- timeUs = mCurrentPage.mGranulePosition * 1000000ll / mVi.rate;
- }
tmp->set_range(0, 0);
}
buffer = tmp;
@@ -521,16 +517,34 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
if (gotFullPacket) {
// We've just read the entire packet.
- if (timeUs >= 0) {
- buffer->meta_data()->setInt64(kKeyTime, timeUs);
- }
-
if (mFirstPacketInPage) {
buffer->meta_data()->setInt32(
kKeyValidSamples, mCurrentPageSamples);
mFirstPacketInPage = false;
}
+ if (mVi.rate) {
+ // Rate may not have been initialized yet if we're currently
+ // reading the configuration packets...
+ // Fortunately, the timestamp doesn't matter for those.
+ int32_t curBlockSize = packetBlockSize(buffer);
+ if (mCurrentPage.mPrevPacketSize < 0) {
+ mCurrentPage.mPrevPacketSize = curBlockSize;
+ mCurrentPage.mPrevPacketPos =
+ mCurrentPage.mGranulePosition - mCurrentPageSamples;
+ timeUs = mCurrentPage.mPrevPacketPos * 1000000ll / mVi.rate;
+ } else {
+ // The effective block size is the average of the two overlapped blocks
+ int32_t actualBlockSize =
+ (curBlockSize + mCurrentPage.mPrevPacketSize) / 2;
+ timeUs = mCurrentPage.mPrevPacketPos * 1000000ll / mVi.rate;
+ // The actual size output by the decoder will be half the effective
+ // size, due to the overlap
+ mCurrentPage.mPrevPacketPos += actualBlockSize / 2;
+ mCurrentPage.mPrevPacketSize = curBlockSize;
+ }
+ buffer->meta_data()->setInt64(kKeyTime, timeUs);
+ }
*out = buffer;
return OK;
@@ -686,6 +700,35 @@ void MyVorbisExtractor::buildTableOfContents() {
}
}
+int32_t MyVorbisExtractor::packetBlockSize(MediaBuffer *buffer) {
+ const uint8_t *data =
+ (const uint8_t *)buffer->data() + buffer->range_offset();
+
+ size_t size = buffer->range_length();
+
+ ogg_buffer buf;
+ buf.data = (uint8_t *)data;
+ buf.size = size;
+ buf.refcount = 1;
+ buf.ptr.owner = NULL;
+
+ ogg_reference ref;
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = size;
+ ref.next = NULL;
+
+ ogg_packet pack;
+ pack.packet = &ref;
+ pack.bytes = ref.length;
+ pack.b_o_s = 0;
+ pack.e_o_s = 0;
+ pack.granulepos = 0;
+ pack.packetno = 0;
+
+ return vorbis_packet_blocksize(&mVi, &pack);
+}
+
status_t MyVorbisExtractor::verifyHeader(
MediaBuffer *buffer, uint8_t type) {
const uint8_t *data =
@@ -730,6 +773,10 @@ status_t MyVorbisExtractor::verifyHeader(
ALOGV("upper-bitrate = %ld", mVi.bitrate_upper);
ALOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
ALOGV("window-bitrate = %ld", mVi.bitrate_window);
+ ALOGV("blocksizes: %d/%d",
+ vorbis_info_blocksize(&mVi, 0),
+ vorbis_info_blocksize(&mVi, 1)
+ );
off64_t size;
if (mSource->getSize(&size) == OK) {
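Editor's note: the OggExtractor change above derives per-packet timestamps from Vorbis block sizes instead of stamping every packet in a page with the page's granule position. Because adjacent Vorbis windows overlap by half, a packet preceded by a block of size P and carrying a block of size C yields (P + C) / 2 / 2 decoded samples, which is how mPrevPacketPos advances. A worked example of that arithmetic (the sample values are made up):

    #include <cassert>
    #include <cstdint>

    int64_t advanceGranulePos(int64_t prevPacketPos,
                              int32_t prevBlockSize, int32_t curBlockSize) {
        int32_t actualBlockSize = (curBlockSize + prevBlockSize) / 2;  // overlapped block
        return prevPacketPos + actualBlockSize / 2;                    // decoder emits half
    }

    int main() {
        // Previous block 2048, current block 256: (2048 + 256) / 2 / 2 = 576 samples.
        assert(advanceGranulePos(0, 2048, 256) == 576);
        // At 44100 Hz the next packet's timestamp is 576 * 1000000 / 44100 ~= 13061 us later.
        return 0;
    }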
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 4e1c65c..530383b 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -448,7 +448,7 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
}
// Part of the BufferQueue::ConsumerListener
-void SurfaceMediaSource::onFrameAvailable() {
+void SurfaceMediaSource::onFrameAvailable(const BufferItem& /* item */) {
ALOGV("onFrameAvailable");
sp<FrameAvailableListener> listener;
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index fc03607..5ec3438 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -222,28 +222,25 @@ status_t getNextNALUnit(
*nalStart = NULL;
*nalSize = 0;
- if (size == 0) {
+ if (size < 3) {
return -EAGAIN;
}
- // Skip any number of leading 0x00.
-
size_t offset = 0;
- while (offset < size && data[offset] == 0x00) {
- ++offset;
- }
-
- if (offset == size) {
- return -EAGAIN;
- }
// A valid startcode consists of at least two 0x00 bytes followed by 0x01.
-
- if (offset < 2 || data[offset] != 0x01) {
- return ERROR_MALFORMED;
+ for (; offset + 2 < size; ++offset) {
+ if (data[offset + 2] == 0x01 && data[offset] == 0x00
+ && data[offset + 1] == 0x00) {
+ break;
+ }
}
-
- ++offset;
+ if (offset + 2 >= size) {
+ *_data = &data[offset];
+ *_size = 2;
+ return -EAGAIN;
+ }
+ offset += 3;
size_t startOffset = offset;
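Editor's note: the rewritten getNextNALUnit() above scans for a complete 00 00 01 start code instead of only skipping leading zero bytes, and returns -EAGAIN (keeping the trailing bytes) when no full start code is in the buffer yet. A minimal sketch of that scan; the function name and return convention are illustrative:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Returns the offset of the first 00 00 01 start code, or -1 if none fits yet.
    static int findStartCode(const uint8_t *data, size_t size) {
        if (size < 3) {
            return -1;                          // not enough data to hold 00 00 01
        }
        for (size_t offset = 0; offset + 2 < size; ++offset) {
            if (data[offset] == 0x00 && data[offset + 1] == 0x00
                    && data[offset + 2] == 0x01) {
                return static_cast<int>(offset);
            }
        }
        return -1;                              // caller keeps the tail and retries later
    }

    int main() {
        const uint8_t buf[] = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42 };
        assert(findStartCode(buf, sizeof(buf)) == 1);
        return 0;
    }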
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 7209cb3..495bad0 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -929,30 +929,22 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
}
if (mEndOfInput) {
- if (outputDelayRingBufferSamplesAvailable() > 0
- && outputDelayRingBufferSamplesAvailable()
- < mStreamInfo->frameSize * mStreamInfo->numChannels) {
- ALOGE("not a complete frame of samples available");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
-
- if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ int ringBufAvail = outputDelayRingBufferSamplesAvailable();
+ if (!outQueue.empty()
+ && ringBufAvail < mStreamInfo->frameSize * mStreamInfo->numChannels) {
if (!mEndOfOutput) {
- // send empty block signaling EOS
+ // send partial or empty block signaling EOS
mEndOfOutput = true;
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- if (outHeader->nOffset != 0) {
- ALOGE("outHeader->nOffset != 0 is not handled");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
+ INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(outHeader->pBuffer
+ + outHeader->nOffset);
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer, ringBufAvail);
+ if (ns < 0) {
+ ns = 0;
}
-
- outHeader->nFilledLen = 0;
+ outHeader->nFilledLen = ns;
outHeader->nFlags = OMX_BUFFERFLAG_EOS;
outHeader->nTimeStamp = mBufferTimestamps.itemAt(0);
@@ -991,7 +983,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
}
int32_t ns = outputDelayRingBufferGetSamples(0, avail);
if (ns != avail) {
- ALOGE("not a complete frame of samples available");
+ ALOGW("not a complete frame of samples available");
break;
}
mOutputBufferCount++;
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 1513b0b..928a74f 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>
+#include <utils/misc.h>
#include "avcenc_api.h"
#include "avcenc_int.h"
@@ -25,6 +26,7 @@
#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -51,31 +53,36 @@ static void InitOMXParams(T *params) {
params->nVersion.s.nStep = 0;
}
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 },
+};
+
typedef struct LevelConversion {
OMX_U32 omxLevel;
AVCLevel avcLevel;
+ uint32_t maxMacroBlocks;
} LevelConcersion;
static LevelConversion ConversionTable[] = {
- { OMX_VIDEO_AVCLevel1, AVC_LEVEL1_B },
- { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1 },
- { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
- { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
- { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
- { OMX_VIDEO_AVCLevel2, AVC_LEVEL2 },
+ { OMX_VIDEO_AVCLevel1, AVC_LEVEL1_B, 99 },
+ { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1, 99 },
+ { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1, 396 },
+ { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2, 396 },
+ { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3, 396 },
+ { OMX_VIDEO_AVCLevel2, AVC_LEVEL2, 396 },
#if 0
- // encoding speed is very poor if video
- // resolution is higher than CIF
- { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
- { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
- { OMX_VIDEO_AVCLevel3, AVC_LEVEL3 },
- { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
- { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
- { OMX_VIDEO_AVCLevel4, AVC_LEVEL4 },
- { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
- { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
- { OMX_VIDEO_AVCLevel5, AVC_LEVEL5 },
- { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
+ // encoding speed is very poor if video resolution
+ // is higher than CIF or if level is higher than 2
+ { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1, 792 },
+ { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2, 1620 },
+ { OMX_VIDEO_AVCLevel3, AVC_LEVEL3, 1620 },
+ { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1, 3600 },
+ { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2, 5120 },
+ { OMX_VIDEO_AVCLevel4, AVC_LEVEL4, 8192 },
+ { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1, 8192 },
+ { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2, 8704 },
+ { OMX_VIDEO_AVCLevel5, AVC_LEVEL5, 22080 },
+ { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1, 36864 },
#endif
};
@@ -148,13 +155,11 @@ SoftAVCEncoder::SoftAVCEncoder(
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
- mVideoWidth(176),
- mVideoHeight(144),
- mVideoFrameRate(30),
- mVideoBitRate(192000),
- mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
- mStoreMetaDataInBuffers(false),
+ : SoftVideoEncoderOMXComponent(
+ name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
+ kProfileLevels, NELEM(kProfileLevels),
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
mIDRFrameRefreshIntervalInSec(1),
mAVCEncProfile(AVC_BASELINE),
mAVCEncLevel(AVC_LEVEL2),
@@ -168,7 +173,13 @@ SoftAVCEncoder::SoftAVCEncoder(
mInputFrameData(NULL),
mSliceGroup(NULL) {
- initPorts();
+ const size_t kOutputBufferSize =
+ 320 * ConversionTable[NELEM(ConversionTable) - 1].maxMacroBlocks;
+
+ initPorts(
+ kNumBuffers, kNumBuffers, kOutputBufferSize,
+ MEDIA_MIMETYPE_VIDEO_AVC, 2 /* minCompressionRatio */);
+
ALOGI("Construct SoftAVCEncoder");
}
@@ -230,30 +241,28 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
mEncParams->use_overrun_buffer = AVC_OFF;
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
- || mStoreMetaDataInBuffers) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
// Color conversion is needed.
free(mInputFrameData);
mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
CHECK(mInputFrameData != NULL);
}
// PV's AVC encoder requires the video dimensions to be a multiple of 16
- if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+ if (mWidth % 16 != 0 || mHeight % 16 != 0) {
ALOGE("Video frame size %dx%d must be a multiple of 16",
- mVideoWidth, mVideoHeight);
+ mWidth, mHeight);
return OMX_ErrorBadParameter;
}
- mEncParams->width = mVideoWidth;
- mEncParams->height = mVideoHeight;
- mEncParams->bitrate = mVideoBitRate;
- mEncParams->frame_rate = 1000 * mVideoFrameRate; // In frames/ms!
- mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);
+ mEncParams->width = mWidth;
+ mEncParams->height = mHeight;
+ mEncParams->bitrate = mBitrate;
+ mEncParams->frame_rate = (1000 * mFramerate) >> 16; // In frames/ms! (mFramerate is in Q16)
+ mEncParams->CPB_size = (uint32_t) (mBitrate >> 1);
- int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
- (((mVideoHeight + 15) >> 4) << 4)) >> 8;
+ int32_t nMacroBlocks = divUp(mWidth, 16) * divUp(mHeight, 16);
CHECK(mSliceGroup == NULL);
mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
CHECK(mSliceGroup != NULL);
@@ -272,7 +281,7 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
mEncParams->idr_period = 1; // All I frames
} else {
mEncParams->idr_period =
- (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+ (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16; // mFramerate is in Q16
}
// Set profile and level
@@ -345,71 +354,9 @@ void SoftAVCEncoder::releaseOutputBuffers() {
mOutputBuffers.clear();
}
-void SoftAVCEncoder::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
-
- const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
-
- // 31584 is PV's magic number. Not sure why.
- const size_t kOutputBufferSize =
- (kInputBufferSize > 31584) ? kInputBufferSize: 31584;
-
- def.nPortIndex = 0;
- def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kInputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 1;
-
- def.format.video.cMIMEType = const_cast<char *>("video/raw");
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
- def.format.video.xFramerate = (mVideoFrameRate << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-
- def.nPortIndex = 1;
- def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kOutputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
-
- def.format.video.cMIMEType = const_cast<char *>("video/avc");
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
- def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- def.format.video.xFramerate = (0 << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-}
-
OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
OMX_INDEXTYPE index, OMX_PTR params) {
switch (index) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -420,37 +367,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
}
bitRate->eControlRate = OMX_Video_ControlRateVariable;
- bitRate->nTargetBitrate = mVideoBitRate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- if (formatParams->nIndex == 0) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- } else if (formatParams->nIndex == 1) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- } else {
- formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
- } else {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- }
-
+ bitRate->nTargetBitrate = mBitrate;
return OMX_ErrorNone;
}
@@ -487,30 +404,8 @@ OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
return OMX_ErrorNone;
}
- case OMX_IndexParamVideoProfileLevelQuerySupported:
- {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
-
- if (profileLevel->nPortIndex != 1) {
- return OMX_ErrorUndefined;
- }
-
- const size_t size =
- sizeof(ConversionTable) / sizeof(ConversionTable[0]);
-
- if (profileLevel->nProfileIndex >= size) {
- return OMX_ErrorNoMore;
- }
-
- profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
- profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
}
}
@@ -519,11 +414,6 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -534,105 +424,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
return OMX_ErrorUndefined;
}
- mVideoBitRate = bitRate->nTargetBitrate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamPortDefinition:
- {
- OMX_PARAM_PORTDEFINITIONTYPE *def =
- (OMX_PARAM_PORTDEFINITIONTYPE *)params;
- if (def->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (def->nPortIndex == 0) {
- if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
- return OMX_ErrorUndefined;
- }
- } else {
- if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
- return OMX_ErrorUndefined;
- }
- }
-
- OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
- if (OMX_ErrorNone != err) {
- return err;
- }
-
- if (def->nPortIndex == 0) {
- mVideoWidth = def->format.video.nFrameWidth;
- mVideoHeight = def->format.video.nFrameHeight;
- mVideoFrameRate = def->format.video.xFramerate >> 16;
- mVideoColorFormat = def->format.video.eColorFormat;
-
- OMX_PARAM_PORTDEFINITIONTYPE *portDef =
- &editPortInfo(0)->mDef;
- portDef->format.video.nFrameWidth = mVideoWidth;
- portDef->format.video.nFrameHeight = mVideoHeight;
- portDef->format.video.xFramerate = def->format.video.xFramerate;
- portDef->format.video.eColorFormat =
- (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
- portDef = &editPortInfo(1)->mDef;
- portDef->format.video.nFrameWidth = mVideoWidth;
- portDef->format.video.nFrameHeight = mVideoHeight;
- } else {
- mVideoBitRate = def->format.video.nBitrate;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamStandardComponentRole:
- {
- const OMX_PARAM_COMPONENTROLETYPE *roleParams =
- (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
- if (strncmp((const char *)roleParams->cRole,
- "video_encoder.avc",
- OMX_MAX_STRINGNAME_SIZE - 1)) {
- return OMX_ErrorUndefined;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
- ((formatParams->nIndex == 0 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
- (formatParams->nIndex == 1 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
- (formatParams->nIndex == 2 &&
- formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
- return OMX_ErrorUndefined;
- }
- mVideoColorFormat = formatParams->eColorFormat;
- } else {
- if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
- formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
- return OMX_ErrorUndefined;
- }
- }
-
+ mBitrate = bitRate->nTargetBitrate;
return OMX_ErrorNone;
}
@@ -669,29 +461,8 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
return OMX_ErrorNone;
}
- case kStoreMetaDataExtensionIndex:
- {
- StoreMetaDataInBuffersParams *storeParams =
- (StoreMetaDataInBuffersParams*)params;
- if (storeParams->nPortIndex != 0) {
- ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
- __FUNCTION__);
- return OMX_ErrorUndefined;
- }
-
- mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
- ALOGV("StoreMetaDataInBuffers set to: %s",
- mStoreMetaDataInBuffers ? " true" : "false");
-
- if (mStoreMetaDataInBuffers) {
- mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
}
}
@@ -785,16 +556,16 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (inHeader->nFilledLen > 0) {
AVCFrameIO videoInput;
memset(&videoInput, 0, sizeof(videoInput));
- videoInput.height = ((mVideoHeight + 15) >> 4) << 4;
- videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
+ videoInput.height = align(mHeight, 16);
+ videoInput.pitch = align(mWidth, 16);
videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000; // in ms
const uint8_t *inputData = NULL;
- if (mStoreMetaDataInBuffers) {
+ if (mInputDataIsMeta) {
inputData =
extractGraphicBuffer(
- mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+ mInputFrameData, (mWidth * mHeight * 3) >> 1,
inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
- mVideoWidth, mVideoHeight);
+ mWidth, mHeight);
if (inputData == NULL) {
ALOGE("Unable to extract gralloc buffer in metadata mode");
mSignalledError = true;
@@ -804,9 +575,9 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
// TODO: Verify/convert pixel format enum
} else {
inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
ConvertYUV420SemiPlanarToYUV420Planar(
- inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+ inputData, mInputFrameData, mWidth, mHeight);
inputData = mInputFrameData;
}
}
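
The conversions above rely on the base class keeping the frame rate in Q16 fixed point and on the divUp helper from AUtils.h. A standalone sketch of the same arithmetic, with illustrative values only (not part of the patch):

    #include <cstdint>
    #include <cstdio>

    // Integer ceiling division, equivalent to what divUp(width, 16) computes.
    static int32_t divUp16(int32_t v) { return (v + 15) / 16; }

    int main() {
        int64_t framerateQ16 = 30LL << 16;   // 30 fps as reported on the OMX port (Q16)
        int32_t width = 176, height = 144;   // QCIF defaults used by the component
        int32_t idrIntervalSec = 1;

        // PV's frame_rate field is fps * 1000; shift out the Q16 fraction.
        int64_t frameRate = (1000 * framerateQ16) >> 16;           // 30000
        // IDR period expressed as a frame count.
        int64_t idrPeriod = (idrIntervalSec * framerateQ16) >> 16; // 30
        // Macroblock count used to size mSliceGroup.
        int32_t nMacroBlocks = divUp16(width) * divUp16(height);   // 11 * 9 = 99

        printf("frame_rate=%lld idr_period=%lld macroblocks=%d\n",
               (long long)frameRate, (long long)idrPeriod, nMacroBlocks);
        return 0;
    }
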
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
index 3e6cd0a..81de109 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -66,12 +66,6 @@ private:
int32_t mFlags;
} InputBufferInfo;
- int32_t mVideoWidth;
- int32_t mVideoHeight;
- int32_t mVideoFrameRate;
- int32_t mVideoBitRate;
- int32_t mVideoColorFormat;
- bool mStoreMetaDataInBuffers;
int32_t mIDRFrameRefreshIntervalInSec;
AVCProfile mAVCEncProfile;
AVCLevel mAVCEncLevel;
@@ -92,7 +86,6 @@ private:
Vector<MediaBuffer *> mOutputBuffers;
Vector<InputBufferInfo> mInputBufferInfoVec;
- void initPorts();
OMX_ERRORTYPE initEncParams();
OMX_ERRORTYPE initEncoder();
OMX_ERRORTYPE releaseEncoder();
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index f4cba54..cddd176 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -26,6 +26,7 @@
#include "SoftHEVC.h"
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <OMX_VideoExt.h>
@@ -75,8 +76,12 @@ SoftHEVC::SoftHEVC(
mNewWidth(mWidth),
mNewHeight(mHeight),
mChangingResolution(false) {
- initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers,
- CODEC_MIME_TYPE);
+ const size_t kMinCompressionRatio = 4 /* compressionRatio (for Level 4+) */;
+ const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
+ // INPUT_BUF_SIZE is the minimum input size required by the HEVC codec
+ initPorts(
+ kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
+ kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
CHECK_EQ(initDecoder(), (status_t)OK);
}
@@ -644,7 +649,7 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
// The decoder should be fixed so that |u4_error_code| instead of |status| returns
// IHEVCD_UNSUPPORTED_DIMENSIONS.
bool unsupportedDimensions =
- ((IHEVCD_UNSUPPORTED_DIMENSIONS == status)
+ ((IHEVCD_UNSUPPORTED_DIMENSIONS == (IHEVCD_CXA_ERROR_CODES_T)status)
|| (IHEVCD_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code));
bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
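
The new initPorts call sizes the compressed input buffer from the worst-case decoded frame divided by a minimum compression ratio, never going below the codec's own minimum. A sketch of that sizing rule with the constants used above (illustrative only; the codec minimum is a stand-in for INPUT_BUF_SIZE):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    int main() {
        const size_t kMinCompressionRatio = 4;                    // assumed for Level 4+
        const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;  // largest YUV420 frame
        const size_t kCodecMinInputSize = 256 * 1024;             // stand-in for INPUT_BUF_SIZE

        size_t minInputBufferSize =
                std::max(kMaxOutputBufferSize / kMinCompressionRatio, kCodecMinInputSize);
        printf("minInputBufferSize=%zu\n", minInputBufferSize);   // 1572864 (1.5 MiB)
        return 0;
    }
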
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index 142969c..ede645c 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -21,6 +21,7 @@
#include "SoftMPEG4.h"
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>
@@ -70,7 +71,7 @@ SoftMPEG4::SoftMPEG4(
mPvTime(0) {
initPorts(
kNumInputBuffers,
- 8192 /* inputBufferSize */,
+ 352 * 288 * 3 / 2 /* minInputBufferSize */,
kNumOutputBuffers,
(mMode == MODE_MPEG4)
? MEDIA_MIMETYPE_VIDEO_MPEG4 : MEDIA_MIMETYPE_VIDEO_H263);
@@ -353,14 +354,14 @@ void SoftMPEG4::onReset() {
}
}
-void SoftMPEG4::updatePortDefinitions(bool updateCrop) {
- SoftVideoDecoderOMXComponent::updatePortDefinitions(updateCrop);
+void SoftMPEG4::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
+ SoftVideoDecoderOMXComponent::updatePortDefinitions(updateCrop, updateInputSize);
/* We have to align our width and height - this should affect stride! */
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
- def->nBufferSize =
- (((def->format.video.nFrameWidth + 15) & -16)
- * ((def->format.video.nFrameHeight + 15) & -16) * 3) / 2;
+ def->format.video.nStride = align(def->format.video.nStride, 16);
+ def->format.video.nSliceHeight = align(def->format.video.nSliceHeight, 16);
+ def->nBufferSize = (def->format.video.nStride * def->format.video.nSliceHeight * 3) / 2;
}
} // namespace android
@@ -382,5 +383,6 @@ android::SoftOMXComponent *createSoftOMXComponent(
} else {
CHECK(!"Unknown component");
}
+ return NULL;
}
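
updatePortDefinitions now derives nBufferSize from the 16-aligned stride and slice height instead of re-aligning width and height inline. A standalone sketch of that computation (the align helper is reproduced here only for illustration):

    #include <cstdint>
    #include <cstdio>

    // Round x up to the next multiple of a power-of-two alignment.
    static uint32_t alignTo(uint32_t x, uint32_t to) {
        return (x + to - 1) & ~(to - 1);
    }

    int main() {
        uint32_t width = 176, height = 144;                   // QCIF
        uint32_t stride = alignTo(width, 16);                 // 176
        uint32_t sliceHeight = alignTo(height, 16);           // 144
        uint32_t bufferSize = stride * sliceHeight * 3 / 2;   // YUV 4:2:0
        printf("stride=%u sliceHeight=%u nBufferSize=%u\n",
               stride, sliceHeight, bufferSize);              // 176 144 38016
        return 0;
    }
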
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
index 31577e2..4114e7d 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
@@ -66,7 +66,7 @@ private:
status_t initDecoder();
- virtual void updatePortDefinitions(bool updateCrop = true);
+ virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
bool handlePortSettingsChange();
DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4);
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 28edff8..8240f83 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftMPEG4Encoder"
#include <utils/Log.h>
+#include <utils/misc.h>
#include "mp4enc_api.h"
#include "OMX_Video.h"
@@ -24,6 +25,7 @@
#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -46,19 +48,30 @@ static void InitOMXParams(T *params) {
params->nVersion.s.nStep = 0;
}
+static const CodecProfileLevel kMPEG4ProfileLevels[] = {
+ { OMX_VIDEO_MPEG4ProfileCore, OMX_VIDEO_MPEG4Level2 },
+};
+
+static const CodecProfileLevel kH263ProfileLevels[] = {
+ { OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level45 },
+};
+
SoftMPEG4Encoder::SoftMPEG4Encoder(
const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const char *mime,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(
+ name, componentRole, codingType,
+ profileLevels, numProfileLevels,
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
- mVideoWidth(176),
- mVideoHeight(144),
- mVideoFrameRate(30),
- mVideoBitRate(192000),
- mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
- mStoreMetaDataInBuffers(false),
mIDRFrameRefreshIntervalInSec(1),
mNumInputFrames(-1),
mStarted(false),
@@ -68,13 +81,15 @@ SoftMPEG4Encoder::SoftMPEG4Encoder(
mEncParams(new tagvideoEncOptions),
mInputFrameData(NULL) {
- if (!strcmp(name, "OMX.google.h263.encoder")) {
+ if (codingType == OMX_VIDEO_CodingH263) {
mEncodeMode = H263_MODE;
- } else {
- CHECK(!strcmp(name, "OMX.google.mpeg4.encoder"));
}
- initPorts();
+ // 256 * 1024 is a magic number for PV's encoder, not sure why
+ const size_t kOutputBufferSize = 256 * 1024;
+
+ initPorts(kNumBuffers, kNumBuffers, kOutputBufferSize, mime);
+
ALOGI("Construct SoftMPEG4Encoder");
}
@@ -98,9 +113,9 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
return OMX_ErrorUndefined;
}
mEncParams->encMode = mEncodeMode;
- mEncParams->encWidth[0] = mVideoWidth;
- mEncParams->encHeight[0] = mVideoHeight;
- mEncParams->encFrameRate[0] = mVideoFrameRate;
+ mEncParams->encWidth[0] = mWidth;
+ mEncParams->encHeight[0] = mHeight;
+ mEncParams->encFrameRate[0] = mFramerate >> 16; // mFramerate is in Q16 format
mEncParams->rcType = VBR_1;
mEncParams->vbvDelay = 5.0f;
@@ -111,27 +126,26 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
mEncParams->rvlcEnable = PV_OFF;
mEncParams->numLayers = 1;
mEncParams->timeIncRes = 1000;
- mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate;
+ mEncParams->tickPerSrc = ((int64_t)mEncParams->timeIncRes << 16) / mFramerate;
- mEncParams->bitRate[0] = mVideoBitRate;
+ mEncParams->bitRate[0] = mBitrate;
mEncParams->iQuant[0] = 15;
mEncParams->pQuant[0] = 12;
mEncParams->quantType[0] = 0;
mEncParams->noFrameSkipped = PV_OFF;
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
- || mStoreMetaDataInBuffers) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
// Color conversion is needed.
free(mInputFrameData);
mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
CHECK(mInputFrameData != NULL);
}
// PV's MPEG4 encoder requires the video dimensions to be a multiple of 16
- if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+ if (mWidth % 16 != 0 || mHeight % 16 != 0) {
ALOGE("Video frame size %dx%d must be a multiple of 16",
- mVideoWidth, mVideoHeight);
+ mWidth, mHeight);
return OMX_ErrorBadParameter;
}
@@ -142,7 +156,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
mEncParams->intraPeriod = 1; // All I frames
} else {
mEncParams->intraPeriod =
- (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+ (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16;
}
mEncParams->numIntraMB = 0;
@@ -201,81 +215,9 @@ OMX_ERRORTYPE SoftMPEG4Encoder::releaseEncoder() {
return OMX_ErrorNone;
}
-void SoftMPEG4Encoder::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
-
- const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
-
- // 256 * 1024 is a magic number for PV's encoder, not sure why
- const size_t kOutputBufferSize =
- (kInputBufferSize > 256 * 1024)
- ? kInputBufferSize: 256 * 1024;
-
- def.nPortIndex = 0;
- def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kInputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 1;
-
- def.format.video.cMIMEType = const_cast<char *>("video/raw");
-
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
- def.format.video.xFramerate = (mVideoFrameRate << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-
- def.nPortIndex = 1;
- def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kOutputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
-
- def.format.video.cMIMEType =
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
- ? const_cast<char *>(MEDIA_MIMETYPE_VIDEO_MPEG4)
- : const_cast<char *>(MEDIA_MIMETYPE_VIDEO_H263);
-
- def.format.video.eCompressionFormat =
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
- ? OMX_VIDEO_CodingMPEG4
- : OMX_VIDEO_CodingH263;
-
- def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- def.format.video.xFramerate = (0 << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-}
-
OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
OMX_INDEXTYPE index, OMX_PTR params) {
switch (index) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -286,41 +228,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
}
bitRate->eControlRate = OMX_Video_ControlRateVariable;
- bitRate->nTargetBitrate = mVideoBitRate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- if (formatParams->nIndex == 0) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- } else if (formatParams->nIndex == 1) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- } else {
- formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
- } else {
- formatParams->eCompressionFormat =
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
- ? OMX_VIDEO_CodingMPEG4
- : OMX_VIDEO_CodingH263;
-
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- }
-
+ bitRate->nTargetBitrate = mBitrate;
return OMX_ErrorNone;
}
@@ -369,32 +277,8 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
return OMX_ErrorNone;
}
- case OMX_IndexParamVideoProfileLevelQuerySupported:
- {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
-
- if (profileLevel->nPortIndex != 1) {
- return OMX_ErrorUndefined;
- }
-
- if (profileLevel->nProfileIndex > 0) {
- return OMX_ErrorNoMore;
- }
-
- if (mEncodeMode == H263_MODE) {
- profileLevel->eProfile = OMX_VIDEO_H263ProfileBaseline;
- profileLevel->eLevel = OMX_VIDEO_H263Level45;
- } else {
- profileLevel->eProfile = OMX_VIDEO_MPEG4ProfileCore;
- profileLevel->eLevel = OMX_VIDEO_MPEG4Level2;
- }
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
}
}
@@ -403,11 +287,6 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -418,112 +297,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
return OMX_ErrorUndefined;
}
- mVideoBitRate = bitRate->nTargetBitrate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamPortDefinition:
- {
- OMX_PARAM_PORTDEFINITIONTYPE *def =
- (OMX_PARAM_PORTDEFINITIONTYPE *)params;
- if (def->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (def->nPortIndex == 0) {
- if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
- return OMX_ErrorUndefined;
- }
- } else {
- if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES &&
- def->format.video.eCompressionFormat != OMX_VIDEO_CodingMPEG4) ||
- (mEncodeMode == H263_MODE &&
- def->format.video.eCompressionFormat != OMX_VIDEO_CodingH263) ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
- return OMX_ErrorUndefined;
- }
- }
-
- OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
- if (OMX_ErrorNone != err) {
- return err;
- }
-
- if (def->nPortIndex == 0) {
- mVideoWidth = def->format.video.nFrameWidth;
- mVideoHeight = def->format.video.nFrameHeight;
- mVideoFrameRate = def->format.video.xFramerate >> 16;
- mVideoColorFormat = def->format.video.eColorFormat;
-
- OMX_PARAM_PORTDEFINITIONTYPE *portDef =
- &editPortInfo(0)->mDef;
- portDef->format.video.nFrameWidth = mVideoWidth;
- portDef->format.video.nFrameHeight = mVideoHeight;
- portDef->format.video.xFramerate = def->format.video.xFramerate;
- portDef->format.video.eColorFormat =
- (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
- portDef = &editPortInfo(1)->mDef;
- portDef->format.video.nFrameWidth = mVideoWidth;
- portDef->format.video.nFrameHeight = mVideoHeight;
- } else {
- mVideoBitRate = def->format.video.nBitrate;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamStandardComponentRole:
- {
- const OMX_PARAM_COMPONENTROLETYPE *roleParams =
- (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
- if (strncmp((const char *)roleParams->cRole,
- (mEncodeMode == H263_MODE)
- ? "video_encoder.h263": "video_encoder.mpeg4",
- OMX_MAX_STRINGNAME_SIZE - 1)) {
- return OMX_ErrorUndefined;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
- ((formatParams->nIndex == 0 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
- (formatParams->nIndex == 1 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
- (formatParams->nIndex == 2 &&
- formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
- return OMX_ErrorUndefined;
- }
- mVideoColorFormat = formatParams->eColorFormat;
- } else {
- if ((mEncodeMode == H263_MODE &&
- formatParams->eCompressionFormat != OMX_VIDEO_CodingH263) ||
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES &&
- formatParams->eCompressionFormat != OMX_VIDEO_CodingMPEG4) ||
- formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
- return OMX_ErrorUndefined;
- }
- }
-
+ mBitrate = bitRate->nTargetBitrate;
return OMX_ErrorNone;
}
@@ -574,29 +348,8 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
return OMX_ErrorNone;
}
- case kStoreMetaDataExtensionIndex:
- {
- StoreMetaDataInBuffersParams *storeParams =
- (StoreMetaDataInBuffersParams*)params;
- if (storeParams->nPortIndex != 0) {
- ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
- __FUNCTION__);
- return OMX_ErrorUndefined;
- }
-
- mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
- ALOGV("StoreMetaDataInBuffers set to: %s",
- mStoreMetaDataInBuffers ? " true" : "false");
-
- if (mStoreMetaDataInBuffers) {
- mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
}
}
@@ -659,12 +412,12 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (inHeader->nFilledLen > 0) {
const uint8_t *inputData = NULL;
- if (mStoreMetaDataInBuffers) {
+ if (mInputDataIsMeta) {
inputData =
extractGraphicBuffer(
- mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+ mInputFrameData, (mWidth * mHeight * 3) >> 1,
inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
- mVideoWidth, mVideoHeight);
+ mWidth, mHeight);
if (inputData == NULL) {
ALOGE("Unable to extract gralloc buffer in metadata mode");
mSignalledError = true;
@@ -673,9 +426,9 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
}
} else {
inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
ConvertYUV420SemiPlanarToYUV420Planar(
- inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+ inputData, mInputFrameData, mWidth, mHeight);
inputData = mInputFrameData;
}
}
@@ -685,8 +438,8 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
VideoEncFrameIO vin, vout;
memset(&vin, 0, sizeof(vin));
memset(&vout, 0, sizeof(vout));
- vin.height = ((mVideoHeight + 15) >> 4) << 4;
- vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
+ vin.height = align(mHeight, 16);
+ vin.pitch = align(mWidth, 16);
vin.timestamp = (inHeader->nTimeStamp + 500) / 1000; // in ms
vin.yChan = (uint8_t *)inputData;
vin.uChan = vin.yChan + vin.height * vin.pitch;
@@ -734,5 +487,19 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
android::SoftOMXComponent *createSoftOMXComponent(
const char *name, const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData, OMX_COMPONENTTYPE **component) {
- return new android::SoftMPEG4Encoder(name, callbacks, appData, component);
+ using namespace android;
+ if (!strcmp(name, "OMX.google.h263.encoder")) {
+ return new android::SoftMPEG4Encoder(
+ name, "video_encoder.h263", OMX_VIDEO_CodingH263, MEDIA_MIMETYPE_VIDEO_H263,
+ kH263ProfileLevels, NELEM(kH263ProfileLevels),
+ callbacks, appData, component);
+ } else if (!strcmp(name, "OMX.google.mpeg4.encoder")) {
+ return new android::SoftMPEG4Encoder(
+ name, "video_encoder.mpeg4", OMX_VIDEO_CodingMPEG4, MEDIA_MIMETYPE_VIDEO_MPEG4,
+ kMPEG4ProfileLevels, NELEM(kMPEG4ProfileLevels),
+ callbacks, appData, component);
+ } else {
+ CHECK(!"Unknown component");
+ }
+ return NULL;
}
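
tickPerSrc is now computed against the Q16 frame rate directly, so the fractional part of the fps is kept in the division. A quick standalone check of that expression (values are examples, not taken from the patch):

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t timeIncRes = 1000;             // encoder time base, ticks per second
        int64_t framerateQ16 = 30LL << 16;     // 30 fps in Q16

        // Ticks per source frame = timeIncRes / fps, evaluated in Q16.
        int64_t tickPerSrc = ((int64_t)timeIncRes << 16) / framerateQ16;
        // intraPeriod (frames between I-frames) uses the same shift-out pattern.
        int64_t intraPeriod = (1 /* sec */ * framerateQ16) >> 16;

        printf("tickPerSrc=%lld intraPeriod=%lld\n",
               (long long)tickPerSrc, (long long)intraPeriod);  // 33 and 30
        return 0;
    }
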
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index d706bb4..3389c37 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -25,9 +25,16 @@
namespace android {
+struct CodecProfileLevel;
+
struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
SoftMPEG4Encoder(
const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const char *mime,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component);
@@ -56,12 +63,6 @@ private:
} InputBufferInfo;
MP4EncodingMode mEncodeMode;
- int32_t mVideoWidth;
- int32_t mVideoHeight;
- int32_t mVideoFrameRate;
- int32_t mVideoBitRate;
- int32_t mVideoColorFormat;
- bool mStoreMetaDataInBuffers;
int32_t mIDRFrameRefreshIntervalInSec;
int64_t mNumInputFrames;
@@ -74,7 +75,6 @@ private:
uint8_t *mInputFrameData;
Vector<InputBufferInfo> mInputBufferInfoVec;
- void initPorts();
OMX_ERRORTYPE initEncParams();
OMX_ERRORTYPE initEncoder();
OMX_ERRORTYPE releaseEncoder();
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 828577a..8a95643 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -40,10 +40,13 @@ SoftVPX::SoftVPX(
mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
mCtx(NULL),
mImg(NULL) {
- initPorts(kNumBuffers, 768 * 1024 /* inputBufferSize */,
- kNumBuffers,
- codingType == OMX_VIDEO_CodingVP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9);
-
+ // arbitrary, borrowed from the AVC/HEVC decoders, since VPx does not specify a min compression ratio
+ const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
+ const char *mime = mMode == MODE_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9;
+ const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
+ initPorts(
+ kNumBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* inputBufferSize */,
+ kNumBuffers, mime, kMinCompressionRatio);
CHECK_EQ(initDecoder(), (status_t)OK);
}
@@ -189,4 +192,5 @@ android::SoftOMXComponent *createSoftOMXComponent(
} else {
CHECK(!"Unknown component");
}
+ return NULL;
}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index eb621d5..970acf3 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -19,6 +19,7 @@
#include "SoftVPXEncoder.h"
#include <utils/Log.h>
+#include <utils/misc.h>
#include <media/hardware/HardwareAPI.h>
#include <media/hardware/MetadataBufferType.h>
@@ -50,23 +51,29 @@ static int GetCPUCoreCount() {
return cpuCoreCount;
}
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version0 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version1 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version2 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version3 },
+};
+
SoftVPXEncoder::SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(
+ name, "video_encoder.vp8", OMX_VIDEO_CodingVP8,
+ kProfileLevels, NELEM(kProfileLevels),
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
mCodecContext(NULL),
mCodecConfiguration(NULL),
mCodecInterface(NULL),
- mWidth(176),
- mHeight(144),
- mBitrate(192000), // in bps
- mFramerate(30 << 16), // in Q16 format
mBitrateUpdated(false),
mBitrateControlMode(VPX_VBR), // variable bitrate
mDCTPartitions(0),
mErrorResilience(OMX_FALSE),
- mColorFormat(OMX_COLOR_FormatYUV420Planar),
mLevel(OMX_VIDEO_VP8Level_Version0),
mKeyFrameInterval(0),
mMinQuantizer(0),
@@ -77,83 +84,22 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name,
mTemporalPatternIdx(0),
mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
mConversionBuffer(NULL),
- mInputDataIsMeta(false),
mKeyFrameRequested(false) {
memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
mTemporalLayerBitrateRatio[0] = 100;
- initPorts();
-}
+ const size_t kMinOutputBufferSize = 1024 * 1024; // arbitrary
-SoftVPXEncoder::~SoftVPXEncoder() {
- releaseEncoder();
+ initPorts(
+ kNumBuffers, kNumBuffers, kMinOutputBufferSize,
+ MEDIA_MIMETYPE_VIDEO_VP8, 2 /* minCompressionRatio */);
}
-void SoftVPXEncoder::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE inputPort;
- OMX_PARAM_PORTDEFINITIONTYPE outputPort;
-
- InitOMXParams(&inputPort);
- InitOMXParams(&outputPort);
-
- inputPort.nBufferCountMin = kNumBuffers;
- inputPort.nBufferCountActual = inputPort.nBufferCountMin;
- inputPort.bEnabled = OMX_TRUE;
- inputPort.bPopulated = OMX_FALSE;
- inputPort.eDomain = OMX_PortDomainVideo;
- inputPort.bBuffersContiguous = OMX_FALSE;
- inputPort.format.video.pNativeRender = NULL;
- inputPort.format.video.nFrameWidth = mWidth;
- inputPort.format.video.nFrameHeight = mHeight;
- inputPort.format.video.nStride = inputPort.format.video.nFrameWidth;
- inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight;
- inputPort.format.video.nBitrate = 0;
- // frameRate is in Q16 format.
- inputPort.format.video.xFramerate = mFramerate;
- inputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
- inputPort.nPortIndex = kInputPortIndex;
- inputPort.eDir = OMX_DirInput;
- inputPort.nBufferAlignment = kInputBufferAlignment;
- inputPort.format.video.cMIMEType =
- const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
- inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- inputPort.format.video.eColorFormat = mColorFormat;
- inputPort.format.video.pNativeWindow = NULL;
- inputPort.nBufferSize =
- (inputPort.format.video.nStride *
- inputPort.format.video.nSliceHeight * 3) / 2;
-
- addPort(inputPort);
-
- outputPort.nBufferCountMin = kNumBuffers;
- outputPort.nBufferCountActual = outputPort.nBufferCountMin;
- outputPort.bEnabled = OMX_TRUE;
- outputPort.bPopulated = OMX_FALSE;
- outputPort.eDomain = OMX_PortDomainVideo;
- outputPort.bBuffersContiguous = OMX_FALSE;
- outputPort.format.video.pNativeRender = NULL;
- outputPort.format.video.nFrameWidth = mWidth;
- outputPort.format.video.nFrameHeight = mHeight;
- outputPort.format.video.nStride = outputPort.format.video.nFrameWidth;
- outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight;
- outputPort.format.video.nBitrate = mBitrate;
- outputPort.format.video.xFramerate = 0;
- outputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
- outputPort.nPortIndex = kOutputPortIndex;
- outputPort.eDir = OMX_DirOutput;
- outputPort.nBufferAlignment = kOutputBufferAlignment;
- outputPort.format.video.cMIMEType =
- const_cast<char *>(MEDIA_MIMETYPE_VIDEO_VP8);
- outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8;
- outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- outputPort.format.video.pNativeWindow = NULL;
- outputPort.nBufferSize = 1024 * 1024; // arbitrary
-
- addPort(outputPort);
+SoftVPXEncoder::~SoftVPXEncoder() {
+ releaseEncoder();
}
-
status_t SoftVPXEncoder::initEncoder() {
vpx_codec_err_t codec_return;
@@ -409,38 +355,6 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
const int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamVideoPortFormat: {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
-
- if (formatParams->nPortIndex == kInputPortIndex) {
- if (formatParams->nIndex >= kNumberOfSupportedColorFormats) {
- return OMX_ErrorNoMore;
- }
-
- // Color formats, in order of preference
- if (formatParams->nIndex == 0) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- } else if (formatParams->nIndex == 1) {
- formatParams->eColorFormat =
- OMX_COLOR_FormatYUV420SemiPlanar;
- } else {
- formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
-
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- formatParams->xFramerate = mFramerate;
- return OMX_ErrorNone;
- } else if (formatParams->nPortIndex == kOutputPortIndex) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8;
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- formatParams->xFramerate = 0;
- return OMX_ErrorNone;
- } else {
- return OMX_ErrorBadPortIndex;
- }
- }
-
case OMX_IndexParamVideoBitrate: {
OMX_VIDEO_PARAM_BITRATETYPE *bitrate =
(OMX_VIDEO_PARAM_BITRATETYPE *)param;
@@ -495,54 +409,8 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
return OMX_ErrorNone;
}
- case OMX_IndexParamVideoProfileLevelQuerySupported: {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
-
- if (profileAndLevel->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- switch (profileAndLevel->nProfileIndex) {
- case 0:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0;
- break;
-
- case 1:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1;
- break;
-
- case 2:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2;
- break;
-
- case 3:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3;
- break;
-
- default:
- return OMX_ErrorNoMore;
- }
-
- profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoProfileLevelCurrent: {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
-
- if (profileAndLevel->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- profileAndLevel->eLevel = mLevel;
- profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalGetParameter(index, param);
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, param);
}
}
@@ -553,30 +421,10 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
const int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamStandardComponentRole:
- return internalSetRoleParams(
- (const OMX_PARAM_COMPONENTROLETYPE *)param);
-
case OMX_IndexParamVideoBitrate:
return internalSetBitrateParams(
(const OMX_VIDEO_PARAM_BITRATETYPE *)param);
- case OMX_IndexParamPortDefinition:
- {
- OMX_ERRORTYPE err = internalSetPortParams(
- (const OMX_PARAM_PORTDEFINITIONTYPE *)param);
-
- if (err != OMX_ErrorNone) {
- return err;
- }
-
- return SimpleSoftOMXComponent::internalSetParameter(index, param);
- }
-
- case OMX_IndexParamVideoPortFormat:
- return internalSetFormatParams(
- (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param);
-
case OMX_IndexParamVideoVp8:
return internalSetVp8Params(
(const OMX_VIDEO_PARAM_VP8TYPE *)param);
@@ -585,27 +433,8 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
return internalSetAndroidVp8Params(
(const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
- case OMX_IndexParamVideoProfileLevelCurrent:
- return internalSetProfileLevel(
- (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param);
-
- case kStoreMetaDataExtensionIndex:
- {
- // storeMetaDataInBuffers
- const StoreMetaDataInBuffersParams *storeParam =
- (const StoreMetaDataInBuffersParams *)param;
-
- if (storeParam->nPortIndex != kInputPortIndex) {
- return OMX_ErrorBadPortIndex;
- }
-
- mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, param);
}
}
@@ -646,29 +475,6 @@ OMX_ERRORTYPE SoftVPXEncoder::setConfig(
}
}
-OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel(
- const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) {
- if (profileAndLevel->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) {
- return OMX_ErrorBadParameter;
- }
-
- if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 ||
- profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 ||
- profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 ||
- profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) {
- mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel;
- } else {
- return OMX_ErrorBadParameter;
- }
-
- return OMX_ErrorNone;
-}
-
-
OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
if (vp8Params->nPortIndex != kOutputPortIndex) {
@@ -743,91 +549,6 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVp8Params(
return OMX_ErrorNone;
}
-OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams(
- const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) {
- if (format->nPortIndex == kInputPortIndex) {
- if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
- format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
- format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
- mColorFormat = format->eColorFormat;
-
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
- def->format.video.eColorFormat = mColorFormat;
-
- return OMX_ErrorNone;
- } else {
- ALOGE("Unsupported color format %i", format->eColorFormat);
- return OMX_ErrorUnsupportedSetting;
- }
- } else if (format->nPortIndex == kOutputPortIndex) {
- if (format->eCompressionFormat == OMX_VIDEO_CodingVP8) {
- return OMX_ErrorNone;
- } else {
- return OMX_ErrorUnsupportedSetting;
- }
- } else {
- return OMX_ErrorBadPortIndex;
- }
-}
-
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams(
- const OMX_PARAM_COMPONENTROLETYPE* role) {
- const char* roleText = (const char*)role->cRole;
- const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1;
-
- if (strncmp(roleText, "video_encoder.vp8", roleTextMaxSize)) {
- ALOGE("Unsupported component role");
- return OMX_ErrorBadParameter;
- }
-
- return OMX_ErrorNone;
-}
-
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
- const OMX_PARAM_PORTDEFINITIONTYPE* port) {
- if (port->nPortIndex == kInputPortIndex) {
- mWidth = port->format.video.nFrameWidth;
- mHeight = port->format.video.nFrameHeight;
-
- // xFramerate comes in Q16 format, in frames per second unit
- mFramerate = port->format.video.xFramerate;
-
- if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
- port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
- port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
- mColorFormat = port->format.video.eColorFormat;
- } else {
- return OMX_ErrorUnsupportedSetting;
- }
-
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.xFramerate = mFramerate;
- def->format.video.eColorFormat = mColorFormat;
- def = &editPortInfo(kOutputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
-
- return OMX_ErrorNone;
- } else if (port->nPortIndex == kOutputPortIndex) {
- mBitrate = port->format.video.nBitrate;
- mWidth = port->format.video.nFrameWidth;
- mHeight = port->format.video.nFrameHeight;
-
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nBitrate = mBitrate;
- return OMX_ErrorNone;
- } else {
- return OMX_ErrorBadPortIndex;
- }
-}
-
-
OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
if (bitrate->nPortIndex != kOutputPortIndex) {
@@ -916,7 +637,7 @@ vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() {
return flags;
}
-void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftVPXEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
// Initialize encoder if not already
if (mCodecContext == NULL) {
if (OK != initEncoder()) {
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index f4c1564..cd0a0cf 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -155,18 +155,6 @@ private:
// that specifies algorithm interface (e.g. vp8)
vpx_codec_iface_t* mCodecInterface;
- // Width of the input frames
- int32_t mWidth;
-
- // Height of the input frames
- int32_t mHeight;
-
- // Target bitrate set for the encoder, in bits per second.
- uint32_t mBitrate;
-
- // Target framerate set for the encoder.
- uint32_t mFramerate;
-
// If a request for a change it bitrate has been received.
bool mBitrateUpdated;
@@ -182,9 +170,6 @@ private:
// is enabled in encoder
OMX_BOOL mErrorResilience;
- // Color format for the input port
- OMX_COLOR_FORMATTYPE mColorFormat;
-
// Encoder profile corresponding to OMX level parameter
//
// The inconsistency in the naming is caused by
@@ -229,14 +214,8 @@ private:
// indeed YUV420SemiPlanar.
uint8_t* mConversionBuffer;
- bool mInputDataIsMeta;
-
bool mKeyFrameRequested;
- // Initializes input and output OMX ports with sensible
- // default values.
- void initPorts();
-
// Initializes vpx encoder with available settings.
status_t initEncoder();
@@ -250,23 +229,10 @@ private:
// Get current encode flags
vpx_enc_frame_flags_t getEncodeFlags();
- // Handles port changes with respect to color formats
- OMX_ERRORTYPE internalSetFormatParams(
- const OMX_VIDEO_PARAM_PORTFORMATTYPE* format);
-
- // Verifies the component role tried to be set to this OMX component is
- // strictly video_encoder.vp8
- OMX_ERRORTYPE internalSetRoleParams(
- const OMX_PARAM_COMPONENTROLETYPE* role);
-
// Updates bitrate to reflect port settings.
OMX_ERRORTYPE internalSetBitrateParams(
const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
- // Handles port definition changes.
- OMX_ERRORTYPE internalSetPortParams(
- const OMX_PARAM_PORTDEFINITIONTYPE* port);
-
// Handles vp8 specific parameters.
OMX_ERRORTYPE internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
@@ -275,10 +241,6 @@ private:
OMX_ERRORTYPE internalSetAndroidVp8Params(
const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams);
- // Updates encoder profile
- OMX_ERRORTYPE internalSetProfileLevel(
- const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel);
-
DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder);
};
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 168208f..6b8b395 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -64,9 +64,11 @@ SoftAVC::SoftAVC(
mHeadersDecoded(false),
mEOSStatus(INPUT_DATA_AVAILABLE),
mSignalledError(false) {
+ const size_t kMinCompressionRatio = 2;
+ const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
initPorts(
- kNumInputBuffers, 8192 /* inputBufferSize */,
- kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);
+ kNumInputBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* minInputBufferSize */,
+ kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC, kMinCompressionRatio);
CHECK_EQ(initDecoder(), (status_t)OK);
}
diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml
index f599004..85f6615 100644
--- a/media/libstagefright/data/media_codecs_google_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_audio.xml
@@ -48,7 +48,7 @@
</MediaCodec>
<MediaCodec name="OMX.google.vorbis.decoder" type="audio/vorbis">
<Limit name="channel-count" max="8" />
- <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000,96000" />
+ <Limit name="sample-rate" ranges="8000-96000" />
<Limit name="bitrate" range="32000-500000" />
</MediaCodec>
<MediaCodec name="OMX.google.opus.decoder" type="audio/opus">
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index 1cbef39..7e9fa18 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -73,7 +73,7 @@
<Encoders>
<MediaCodec name="OMX.google.h263.encoder" type="video/3gpp">
<!-- profiles and levels: ProfileBaseline : Level45 -->
- <Limit name="size" min="16x16" max="176x144" />
+ <Limit name="size" min="176x144" max="176x144" />
<Limit name="alignment" value="16x16" />
<Limit name="bitrate" range="1-128000" />
</MediaCodec>
diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/libstagefright/foundation/ADebug.cpp
new file mode 100644
index 0000000..ec4a960
--- /dev/null
+++ b/media/libstagefright/foundation/ADebug.cpp
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdlib.h>
+#include <ctype.h>
+
+#define LOG_TAG "ADebug"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <cutils/properties.h>
+
+#include <ADebug.h>
+#include <AStringUtils.h>
+#include <AUtils.h>
+
+namespace android {
+
+//static
+ADebug::Level ADebug::GetDebugLevelFromString(
+ const char *name, const char *value, ADebug::Level def) {
+ // split on ,
+ const char *next = value, *current;
+ const unsigned long maxLevel = (unsigned long)kDebugMax;
+ while (next != NULL) {
+ current = next;
+ next = strchr(current, ',');
+ if (next != NULL) {
+ ++next; // pass ,
+ }
+
+ while (isspace(*current)) {
+ ++current;
+ }
+ // check for :
+ char *colon = strchr(current, ':');
+
+ // get level
+ char *end;
+ errno = 0; // strtoul does not clear errno, but it can be set for any return value
+ unsigned long level = strtoul(current, &end, 10);
+ while (isspace(*end)) {
+ ++end;
+ }
+ if (errno != 0 || end == current || (end != colon && *end != '\0' && end != next)) {
+ // invalid level - skip
+ continue;
+ }
+ if (colon != NULL) {
+ // check if pattern matches
+ do { // skip colon and spaces
+ ++colon;
+ } while (isspace(*colon));
+ size_t globLen = (next == NULL ? strlen(colon) : (next - 1 - colon));
+ while (globLen > 0 && isspace(colon[globLen - 1])) {
+ --globLen; // trim glob
+ }
+
+ if (!AStringUtils::MatchesGlob(
+ colon, globLen, name, strlen(name), true /* ignoreCase */)) {
+ continue;
+ }
+ }
+
+ // update debug level
+ def = (Level)min(level, maxLevel);
+ }
+ return def;
+}
+
+//static
+ADebug::Level ADebug::GetDebugLevelFromProperty(
+ const char *name, const char *propertyName, ADebug::Level def) {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get(propertyName, value, NULL)) {
+ return GetDebugLevelFromString(name, value, def);
+ }
+ return def;
+}
+
+//static
+char *ADebug::GetDebugName(const char *name) {
+ char *debugName = strdup(name);
+ const char *terms[] = { "omx", "video", "audio" };
+ for (size_t i = 0; i < NELEM(terms) && debugName != NULL; i++) {
+ const char *term = terms[i];
+ const size_t len = strlen(term);
+ char *match = strcasestr(debugName, term);
+ if (match != NULL && (match == debugName || match[-1] == '.'
+ || match[len] == '.' || match[len] == '\0')) {
+ char *src = match + len;
+ if (match == debugName || match[-1] == '.') {
+ src += (*src == '.'); // remove trailing or double .
+ }
+ memmove(match, src, debugName + strlen(debugName) - src + 1);
+ }
+ }
+
+ return debugName;
+}
+
+} // namespace android
+
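Usage note for the ADebug helpers added above: GetDebugLevelFromString takes a comma-separated list of entries, each a numeric level optionally followed by ':' and a glob matched case-insensitively against the instance name; every matching entry overwrites the result (so the last match wins) and the value is clamped to kDebugMax. GetDebugName only produces a shortened name for log prefixes by stripping the "omx"/"video"/"audio" terms. A minimal sketch follows; the property value, component name and the (ADebug::Level)0 "no debug" default are illustrative assumptions, not part of this change:

    #include <media/stagefright/foundation/ADebug.h>   // declares the helpers added above

    // e.g. adb shell setprop debug.stagefright.omx-debug "1:*,3:*.decoder,4:*encoder*"
    const char *name = "OMX.google.aac.decoder";        // illustrative component name
    ADebug::Level level = ADebug::GetDebugLevelFromProperty(
            name, "debug.stagefright.omx-debug", (ADebug::Level)0 /* assumed default */);
    // "1:*" and "3:*.decoder" both match the name, "4:*encoder*" does not,
    // and the last matching entry wins, so level == (ADebug::Level)3.
    char *logName = ADebug::GetDebugName(name);          // -> "google.aac.decoder"
    free(logName);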
diff --git a/media/libstagefright/foundation/AStringUtils.cpp b/media/libstagefright/foundation/AStringUtils.cpp
new file mode 100644
index 0000000..e5a846c
--- /dev/null
+++ b/media/libstagefright/foundation/AStringUtils.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <AStringUtils.h>
+
+namespace android {
+
+// static
+int AStringUtils::Compare(const char *a, const char *b, size_t len, bool ignoreCase) {
+ // this method relies on a trailing '\0' if a or b are shorter than len
+ return ignoreCase ? strncasecmp(a, b, len) : strncmp(a, b, len);
+}
+
+// static
+bool AStringUtils::MatchesGlob(
+ const char *glob, size_t globLen, const char *str, size_t strLen, bool ignoreCase) {
+ // this method does not assume a trailing '\0'
+ size_t ix = 0, globIx = 0;
+
+ // pattern must match until first '*'
+ while (globIx < globLen && glob[globIx] != '*') {
+ ++globIx;
+ }
+ if (strLen < globIx || Compare(str, glob, globIx /* len */, ignoreCase)) {
+ return false;
+ }
+ ix = globIx;
+
+ // process by * separated sections
+ while (globIx < globLen) {
+ ++globIx;
+ size_t start = globIx;
+ while (globIx < globLen && glob[globIx] != '*') {
+ ++globIx;
+ }
+ size_t len = globIx - start;
+ const char *pattern = glob + start;
+
+ if (globIx == globLen) {
+ // last pattern must match tail
+ if (ix + len > strLen) {
+ return false;
+ }
+ const char *tail = str + strLen - len;
+ return !Compare(tail, pattern, len, ignoreCase);
+ }
+ // progress after first occurrence of pattern
+ while (ix + len <= strLen && Compare(str + ix, pattern, len, ignoreCase)) {
+ ++ix;
+ }
+ if (ix + len > strLen) {
+ return false;
+ }
+ ix += len;
+ // we will loop around as globIx < globLen
+ }
+
+ // we only get here if there were no * in the pattern
+ return ix == strLen;
+}
+
+} // namespace android
+
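A usage note for AStringUtils::MatchesGlob above: '*' is the only wildcard, neither argument needs to be NUL-terminated because explicit lengths are passed, and matching is case-insensitive when requested. A small illustrative sketch (the names are made up and the include path is assumed):

    #include <string.h>
    #include <media/stagefright/foundation/AStringUtils.h>

    const char *glob = "omx.*.decoder";
    const char *name = "OMX.google.aac.decoder";
    // the prefix before '*' and the suffix after it must both match; the middle may be anything
    bool hit = AStringUtils::MatchesGlob(
            glob, strlen(glob), name, strlen(name), true /* ignoreCase */);   // true
    bool miss = AStringUtils::MatchesGlob(
            "*video*", 7, name, strlen(name), true /* ignoreCase */);         // false: no "video"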
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index 90a6a23..c1dd6ce 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -5,6 +5,7 @@ LOCAL_SRC_FILES:= \
AAtomizer.cpp \
ABitReader.cpp \
ABuffer.cpp \
+ ADebug.cpp \
AHandler.cpp \
AHierarchicalStateMachine.cpp \
ALooper.cpp \
@@ -12,6 +13,7 @@ LOCAL_SRC_FILES:= \
AMessage.cpp \
ANetworkSession.cpp \
AString.cpp \
+ AStringUtils.cpp \
ParsedMessage.cpp \
base64.cpp \
hexdump.cpp
@@ -22,6 +24,7 @@ LOCAL_C_INCLUDES:= \
LOCAL_SHARED_LIBRARIES := \
libbinder \
libutils \
+ libcutils \
liblog
LOCAL_CFLAGS += -Wno-multichar -Werror
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index a934e72..6522ad7 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1165,6 +1165,14 @@ status_t LiveSession::selectTrack(size_t index, bool select) {
return err;
}
+ssize_t LiveSession::getSelectedTrack(media_track_type type) const {
+ if (mPlaylist == NULL) {
+ return -1;
+ } else {
+ return mPlaylist->getSelectedTrack(type);
+ }
+}
+
bool LiveSession::canSwitchUp() {
// Allow upwards bandwidth switch when a stream has buffered at least 10 seconds.
status_t err = OK;
@@ -1458,7 +1466,7 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
extra->setInt64("timeUs", timeUs);
discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
discontinuityQueue->queueDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, extra, true);
+ ATSParser::DISCONTINUITY_TIME, extra, true);
} else {
int32_t type;
sp<AMessage> meta;
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 7aacca6..896a8fc 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -19,6 +19,7 @@
#define LIVE_SESSION_H_
#include <media/stagefright/foundation/AHandler.h>
+#include <media/mediaplayer.h>
#include <utils/String8.h>
@@ -73,6 +74,7 @@ struct LiveSession : public AHandler {
size_t getTrackCount() const;
sp<AMessage> getTrackInfo(size_t trackIndex) const;
status_t selectTrack(size_t index, bool select);
+ ssize_t getSelectedTrack(media_track_type /* type */) const;
bool isSeekable() const;
bool hasDynamicDuration() const;
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 1651dee..eb62c7a 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -66,6 +66,9 @@ protected:
virtual ~MediaGroup();
private:
+
+ friend struct M3UParser;
+
struct Media {
AString mName;
AString mURI;
@@ -356,6 +359,38 @@ ssize_t M3UParser::getSelectedIndex() const {
return mSelectedIndex;
}
+ssize_t M3UParser::getSelectedTrack(media_track_type type) const {
+ MediaGroup::Type groupType;
+ switch (type) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ groupType = MediaGroup::TYPE_VIDEO;
+ break;
+
+ case MEDIA_TRACK_TYPE_AUDIO:
+ groupType = MediaGroup::TYPE_AUDIO;
+ break;
+
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ groupType = MediaGroup::TYPE_SUBS;
+ break;
+
+ default:
+ return -1;
+ }
+
+ for (size_t i = 0, ii = 0; i < mMediaGroups.size(); ++i) {
+ sp<MediaGroup> group = mMediaGroups.valueAt(i);
+ size_t tracks = group->countTracks();
+ if (groupType != group->mType) {
+ ii += tracks;
+ } else if (group->mSelectedIndex >= 0) {
+ return ii + group->mSelectedIndex;
+ }
+ }
+
+ return -1;
+}
+
bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
if (!mIsVariantPlaylist) {
*uri = mBaseURI;
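A worked example of the flattened index returned by getSelectedTrack above (the group layout is hypothetical): track indices are counted across mMediaGroups in declaration order, and groups of a different type only contribute their track counts to the offset.

    // Suppose mMediaGroups holds, in order: an AUDIO group with 2 tracks, a SUBS group
    // with 1 track whose mSelectedIndex is 0, and a VIDEO group with 3 tracks.
    //   getSelectedTrack(MEDIA_TRACK_TYPE_SUBTITLE) -> skips the audio group (ii = 2)
    //                                                  and returns ii + 0 == 2
    //   getSelectedTrack(MEDIA_TRACK_TYPE_TIMEDTEXT) -> -1 (no matching group type)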
diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h
index d588afe..1cad060 100644
--- a/media/libstagefright/httplive/M3UParser.h
+++ b/media/libstagefright/httplive/M3UParser.h
@@ -21,6 +21,7 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
+#include <media/mediaplayer.h>
#include <utils/Vector.h>
namespace android {
@@ -46,6 +47,7 @@ struct M3UParser : public RefBase {
size_t getTrackCount() const;
sp<AMessage> getTrackInfo(size_t index) const;
ssize_t getSelectedIndex() const;
+ ssize_t getSelectedTrack(media_track_type /* type */) const;
bool getTypeURI(size_t index, const char *key, AString *uri) const;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 2873fc4..c0c7ed9 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -699,8 +699,7 @@ status_t PlaylistFetcher::refreshPlaylist() {
mRefreshState = (RefreshState)(mRefreshState + 1);
}
} else {
- ALOGE("failed to load playlist at url '%s'", mURI.c_str());
- notifyError(ERROR_IO);
+ ALOGE("failed to load playlist at url '%s'", uriDebugString(mURI).c_str());
return ERROR_IO;
}
} else {
@@ -723,26 +722,25 @@ bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) {
}
void PlaylistFetcher::onDownloadNext() {
- if (refreshPlaylist() != OK) {
- return;
- }
-
- int32_t firstSeqNumberInPlaylist;
- if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
- "media-sequence", &firstSeqNumberInPlaylist)) {
- firstSeqNumberInPlaylist = 0;
- }
-
+ status_t err = refreshPlaylist();
+ int32_t firstSeqNumberInPlaylist = 0;
+ int32_t lastSeqNumberInPlaylist = 0;
bool discontinuity = false;
- const int32_t lastSeqNumberInPlaylist =
- firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+ if (mPlaylist != NULL) {
+ if (mPlaylist->meta() != NULL) {
+ mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist);
+ }
+
+ lastSeqNumberInPlaylist =
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
- if (mDiscontinuitySeq < 0) {
- mDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
+ if (mDiscontinuitySeq < 0) {
+ mDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
+ }
}
- if (mSeqNumber < 0) {
+ if (mPlaylist != NULL && mSeqNumber < 0) {
CHECK_GE(mStartTimeUs, 0ll);
if (mSegmentStartTimeUs < 0) {
@@ -784,19 +782,26 @@ void PlaylistFetcher::onDownloadNext() {
}
}
+ // if mPlaylist is NULL then err must be non-OK, but the converse is not necessarily true
if (mSeqNumber < firstSeqNumberInPlaylist
- || mSeqNumber > lastSeqNumberInPlaylist) {
- if (!mPlaylist->isComplete() && mNumRetries < kMaxNumRetries) {
+ || mSeqNumber > lastSeqNumberInPlaylist
+ || err != OK) {
+ if ((err != OK || !mPlaylist->isComplete()) && mNumRetries < kMaxNumRetries) {
++mNumRetries;
- if (mSeqNumber > lastSeqNumberInPlaylist) {
+ if (mSeqNumber > lastSeqNumberInPlaylist || err != OK) {
+ // make sure we reach this retry logic on refresh failures
+ // by adding an err != OK clause to all enclosing if's.
+
// refresh in increasing fraction (1/2, 1/3, ...) of the
// playlist's target duration or 3 seconds, whichever is less
- int32_t targetDurationSecs;
- CHECK(mPlaylist->meta()->findInt32(
- "target-duration", &targetDurationSecs));
- int64_t delayUs = mPlaylist->size() * targetDurationSecs *
- 1000000ll / (1 + mNumRetries);
+ int64_t delayUs = kMaxMonitorDelayUs;
+ if (mPlaylist != NULL && mPlaylist->meta() != NULL) {
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ delayUs = mPlaylist->size() * targetDurationSecs *
+ 1000000ll / (1 + mNumRetries);
+ }
if (delayUs > kMaxMonitorDelayUs) {
delayUs = kMaxMonitorDelayUs;
}
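A worked example of the retry delay computed above (the numbers are illustrative): for a 6-segment playlist whose target-duration is 4 seconds, the first retry (mNumRetries == 1) uses

    delayUs = mPlaylist->size() * targetDurationSecs * 1000000ll / (1 + mNumRetries)
            = 6 * 4 * 1000000 / 2 = 12,000,000 us

which is then clamped to kMaxMonitorDelayUs (3 seconds, per the comment above); if the playlist could not be loaded at all (mPlaylist == NULL), the fetcher simply waits kMaxMonitorDelayUs before polling again.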
@@ -808,13 +813,30 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- // we've missed the boat, let's start from the lowest sequence
+ if (err != OK) {
+ notifyError(err);
+ return;
+ }
+
+ // we've missed the boat, let's start 3 segments prior to the latest sequence
// number available and signal a discontinuity.
ALOGI("We've missed the boat, restarting playback."
" mStartup=%d, was looking for %d in %d-%d",
mStartup, mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist);
+ if (mStopParams != NULL) {
+ // we should have kept on fetching until we hit the boundaries in mStopParams,
+ // but since the segments we are supposed to fetch have already rolled off
+ // the playlist, i.e. we have already missed the boat, we inevitably have to
+ // skip.
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ sp<ABuffer> formatChange = mSession->createFormatChangeBuffer();
+ mPacketSources.valueAt(i)->queueAccessUnit(formatChange);
+ }
+ stopAsync(/* clear = */ false);
+ return;
+ }
mSeqNumber = lastSeqNumberInPlaylist - 3;
if (mSeqNumber < firstSeqNumberInPlaylist) {
mSeqNumber = firstSeqNumberInPlaylist;
@@ -962,8 +984,8 @@ void PlaylistFetcher::onDownloadNext() {
} while (bytesRead != 0);
if (bufferStartsWithTsSyncByte(buffer)) {
- // If we still don't see a stream after fetching a full ts segment mark it as
- // nonexistent.
+ // If we don't see a stream in the program table after fetching a full ts segment
+ // mark it as nonexistent.
const size_t kNumTypes = ATSParser::NUM_SOURCE_TYPES;
ATSParser::SourceType srcTypes[kNumTypes] =
{ ATSParser::VIDEO, ATSParser::AUDIO };
@@ -978,7 +1000,7 @@ void PlaylistFetcher::onDownloadNext() {
static_cast<AnotherPacketSource *>(
mTSParser->getSource(srcType).get());
- if (source == NULL) {
+ if (!mTSParser->hasSource(srcType)) {
ALOGW("MPEG2 Transport stream does not contain %s data.",
srcType == ATSParser::VIDEO ? "video" : "audio");
@@ -995,7 +1017,7 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- status_t err = OK;
+ err = OK;
if (tsBuffer != NULL) {
AString method;
CHECK(buffer->meta()->findString("cipher-method", &method));
@@ -1154,7 +1176,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0);
mTSParser->signalDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, extra);
+ ATSParser::DISCONTINUITY_TIME, extra);
mAbsoluteTimeAnchorUs = mNextPTSTimeUs;
mNextPTSTimeUs = -1ll;
@@ -1255,6 +1277,11 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
if (mStartTimeUsNotify != NULL && timeUs > mStartTimeUs) {
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
int32_t targetDurationSecs;
CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
@@ -1265,6 +1292,8 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
// mStartTimeUs.
// mSegmentStartTimeUs >= 0
// mSegmentStartTimeUs is non-negative when adapting or switching tracks
+ // mSeqNumber > firstSeqNumberInPlaylist
+ // don't decrement mSeqNumber if it already points to the 1st segment
// timeUs - mStartTimeUs > targetDurationUs:
// This and the 2 above conditions should only happen when adapting in a live
// stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher
@@ -1274,6 +1303,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
// stop as early as possible. The definition of being "too far ahead" is
// arbitrary; here we use targetDurationUs as threshold.
if (mStartup && mSegmentStartTimeUs >= 0
+ && mSeqNumber > firstSeqNumberInPlaylist
&& timeUs - mStartTimeUs > targetDurationUs) {
// we just guessed a starting timestamp that is too high when adapting in a
// live stream; re-adjust based on the actual timestamp extracted from the
@@ -1586,6 +1616,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO);
mStartTimeUsNotify->post();
mStartTimeUsNotify.clear();
+ mStartup = false;
}
}
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 24d431c..104dcfc 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -31,7 +31,7 @@ struct GraphicBufferSource;
struct OMXNodeInstance {
OMXNodeInstance(
- OMX *owner, const sp<IOMXObserver> &observer);
+ OMX *owner, const sp<IOMXObserver> &observer, const char *name);
void setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle);
@@ -149,6 +149,18 @@ private:
KeyedVector<OMX_BUFFERHEADERTYPE *, OMX::buffer_id> mBufferHeaderToBufferID;
#endif
+ // For debug support
+ char *mName;
+ int DEBUG;
+ size_t mNumPortBuffers[2]; // modified under mLock, read outside for debug
+ Mutex mDebugLock;
+ // following are modified and read under mDebugLock
+ int DEBUG_BUMP;
+ SortedVector<OMX_BUFFERHEADERTYPE *> mInputBuffersWithCodec, mOutputBuffersWithCodec;
+ size_t mDebugLevelBumpPendingBuffers[2];
+ void bumpDebugLevel_l(size_t numInputBuffers, size_t numOutputBuffers);
+ void unbumpDebugLevel_l(size_t portIndex);
+
~OMXNodeInstance();
void addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id);
@@ -186,6 +198,10 @@ private:
OMX_U32 portIndex, OMX_BOOL enable,
OMX_BOOL useGraphicBuffer, OMX_BOOL *usingGraphicBufferInMeta);
+ status_t emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header,
+ OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr);
+
sp<GraphicBufferSource> getGraphicBufferSource();
void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource);
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
index 9e97ebd..4529007 100644
--- a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
@@ -61,9 +61,10 @@ protected:
void initPorts(OMX_U32 numInputBuffers,
OMX_U32 inputBufferSize,
OMX_U32 numOutputBuffers,
- const char *mimeType);
+ const char *mimeType,
+ OMX_U32 minCompressionRatio = 1u);
- virtual void updatePortDefinitions(bool updateCrop = true);
+ virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
uint32_t outputBufferWidth();
uint32_t outputBufferHeight();
@@ -99,6 +100,9 @@ protected:
} mOutputPortSettingsChange;
private:
+ uint32_t mMinInputBufferSize;
+ uint32_t mMinCompressionRatio;
+
const char *mComponentRole;
OMX_VIDEO_CODINGTYPE mCodingType;
const CodecProfileLevel *mProfileLevels;
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
index b3b810d..b43635d 100644
--- a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
@@ -18,6 +18,8 @@
#define SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+#include <media/IOMX.h>
+
#include "SimpleSoftOMXComponent.h"
#include <system/window.h>
@@ -28,11 +30,26 @@ namespace android {
struct SoftVideoEncoderOMXComponent : public SimpleSoftOMXComponent {
SoftVideoEncoderOMXComponent(
const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
+ int32_t width,
+ int32_t height,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component);
+ virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR param);
+ virtual OMX_ERRORTYPE internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params);
+
protected:
+ void initPorts(
+ OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
+ const char *mime, OMX_U32 minCompressionRatio = 1);
+
+ static void setRawVideoSize(OMX_PARAM_PORTDEFINITIONTYPE *def);
+
static void ConvertFlexYUVToPlanar(
uint8_t *dst, size_t dstStride, size_t dstVStride,
struct android_ycbcr *ycbcr, int32_t width, int32_t height);
@@ -56,9 +73,30 @@ protected:
kOutputPortIndex = 1,
};
+ bool mInputDataIsMeta;
+ int32_t mWidth; // width of the input frames
+ int32_t mHeight; // height of the input frames
+ uint32_t mBitrate; // target bitrate set for the encoder, in bits per second
+ uint32_t mFramerate; // target framerate set for the encoder, in Q16 format
+ OMX_COLOR_FORMATTYPE mColorFormat; // Color format for the input port
+
private:
+ void updatePortParams();
+ OMX_ERRORTYPE internalSetPortParams(const OMX_PARAM_PORTDEFINITIONTYPE* port);
+
+ static const uint32_t kInputBufferAlignment = 1;
+ static const uint32_t kOutputBufferAlignment = 2;
+
mutable const hw_module_t *mGrallocModule;
+ uint32_t mMinOutputBufferSize;
+ uint32_t mMinCompressionRatio;
+
+ const char *mComponentRole;
+ OMX_VIDEO_CODINGTYPE mCodingType;
+ const CodecProfileLevel *mProfileLevels;
+ size_t mNumProfileLevels;
+
DISALLOW_EVIL_CONSTRUCTORS(SoftVideoEncoderOMXComponent);
};
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 89ffa0c..0712bf0 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -413,16 +413,16 @@ void BlockIterator::seek(
const mkvparser::CuePoint* pCP;
mkvparser::Tracks const *pTracks = pSegment->GetTracks();
- unsigned long int trackCount = pTracks->GetTracksCount();
while (!pCues->DoneParsing()) {
pCues->LoadCuePoint();
pCP = pCues->GetLast();
CHECK(pCP);
+ size_t trackCount = mExtractor->mTracks.size();
for (size_t index = 0; index < trackCount; ++index) {
- const mkvparser::Track *pTrack = pTracks->GetTrackByIndex(index);
+ MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(index);
+ const mkvparser::Track *pTrack = pTracks->GetTrackByNumber(track.mTrackNum);
if (pTrack && pTrack->GetType() == 1 && pCP->Find(pTrack)) { // VIDEO_TRACK
- MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(index);
track.mCuePoints.push_back(pCP);
}
}
@@ -434,12 +434,13 @@ void BlockIterator::seek(
}
const mkvparser::CuePoint::TrackPosition *pTP = NULL;
- const mkvparser::Track *thisTrack = pTracks->GetTrackByIndex(mIndex);
+ const mkvparser::Track *thisTrack = pTracks->GetTrackByNumber(mTrackNum);
if (thisTrack->GetType() == 1) { // video
MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(mIndex);
pTP = track.find(seekTimeNs);
} else {
// The Cue index is built around video keyframes
+ unsigned long int trackCount = pTracks->GetTracksCount();
for (size_t index = 0; index < trackCount; ++index) {
const mkvparser::Track *pTrack = pTracks->GetTrackByIndex(index);
if (pTrack && pTrack->GetType() == 1 && pCues->Find(seekTimeNs, pTrack, pCP, pTP)) {
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 6d8866a..482ccff 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -63,6 +63,7 @@ struct ATSParser::Program : public RefBase {
void signalEOS(status_t finalResult);
sp<MediaSource> getSource(SourceType type);
+ bool hasSource(SourceType type) const;
int64_t convertPTSToTimestamp(uint64_t PTS);
@@ -119,6 +120,9 @@ struct ATSParser::Stream : public RefBase {
sp<MediaSource> getSource(SourceType type);
+ bool isAudio() const;
+ bool isVideo() const;
+
protected:
virtual ~Stream();
@@ -146,9 +150,6 @@ private:
void extractAACFrames(const sp<ABuffer> &buffer);
- bool isAudio() const;
- bool isVideo() const;
-
DISALLOW_EVIL_CONSTRUCTORS(Stream);
};
@@ -244,11 +245,16 @@ struct StreamInfo {
status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
- CHECK_EQ(table_id, 0x02u);
-
+ if (table_id != 0x02u) {
+ ALOGE("PMT data error!");
+ return ERROR_MALFORMED;
+ }
unsigned section_syntax_indicator = br->getBits(1);
ALOGV(" section_syntax_indicator = %u", section_syntax_indicator);
- CHECK_EQ(section_syntax_indicator, 1u);
+ if (section_syntax_indicator != 1u) {
+ ALOGE("PMT data error!");
+ return ERROR_MALFORMED;
+ }
CHECK_EQ(br->getBits(1), 0u);
MY_LOGV(" reserved = %u", br->getBits(2));
@@ -435,6 +441,19 @@ sp<MediaSource> ATSParser::Program::getSource(SourceType type) {
return NULL;
}
+bool ATSParser::Program::hasSource(SourceType type) const {
+ for (size_t i = 0; i < mStreams.size(); ++i) {
+ const sp<Stream> &stream = mStreams.valueAt(i);
+ if (type == AUDIO && stream->isAudio()) {
+ return true;
+ } else if (type == VIDEO && stream->isVideo()) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) {
if (!(mParser->mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)) {
if (!mFirstPTSValid) {
@@ -660,7 +679,7 @@ void ATSParser::Stream::signalDiscontinuity(
int64_t resumeAtMediaTimeUs =
mProgram->convertPTSToTimestamp(resumeAtPTS);
- extra->setInt64("resume-at-mediatimeUs", resumeAtMediaTimeUs);
+ extra->setInt64("resume-at-mediaTimeUs", resumeAtMediaTimeUs);
}
}
@@ -739,8 +758,10 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
CHECK_GE(optional_bytes_remaining, 5u);
- CHECK_EQ(br->getBits(4), PTS_DTS_flags);
-
+ if (br->getBits(4) != PTS_DTS_flags) {
+ ALOGE("PES data error!");
+ return ERROR_MALFORMED;
+ }
PTS = ((uint64_t)br->getBits(3)) << 30;
CHECK_EQ(br->getBits(1), 1u);
PTS |= ((uint64_t)br->getBits(15)) << 15;
@@ -1003,8 +1024,10 @@ void ATSParser::signalEOS(status_t finalResult) {
void ATSParser::parseProgramAssociationTable(ABitReader *br) {
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
- CHECK_EQ(table_id, 0x00u);
-
+ if (table_id != 0x00u) {
+ ALOGE("PAT data error!");
+ return;
+ }
unsigned section_syntax_indictor = br->getBits(1);
ALOGV(" section_syntax_indictor = %u", section_syntax_indictor);
CHECK_EQ(section_syntax_indictor, 1u);
@@ -1074,7 +1097,9 @@ status_t ATSParser::parsePID(
sp<PSISection> section = mPSISections.valueAt(sectionIndex);
if (payload_unit_start_indicator) {
- CHECK(section->isEmpty());
+ if (!section->isEmpty()) {
+ return ERROR_UNSUPPORTED;
+ }
unsigned skip = br->getBits(8);
br->skipBits(skip * 8);
@@ -1203,7 +1228,10 @@ status_t ATSParser::parseTS(ABitReader *br) {
ALOGV("---");
unsigned sync_byte = br->getBits(8);
- CHECK_EQ(sync_byte, 0x47u);
+ if (sync_byte != 0x47u) {
+ ALOGE("[error] parseTS: return error as sync_byte=0x%x", sync_byte);
+ return BAD_VALUE;
+ }
if (br->getBits(1)) { // transport_error_indicator
// silently ignore.
@@ -1264,6 +1292,17 @@ sp<MediaSource> ATSParser::getSource(SourceType type) {
return NULL;
}
+bool ATSParser::hasSource(SourceType type) const {
+ for (size_t i = 0; i < mPrograms.size(); ++i) {
+ const sp<Program> &program = mPrograms.itemAt(i);
+ if (program->hasSource(type)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
bool ATSParser::PTSTimeDeltaEstablished() {
if (mPrograms.isEmpty()) {
return false;
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index b5cc6ed..75d76dc 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -41,8 +41,6 @@ struct ATSParser : public RefBase {
DISCONTINUITY_ABSOLUTE_TIME = 8,
DISCONTINUITY_TIME_OFFSET = 16,
- DISCONTINUITY_SEEK = DISCONTINUITY_TIME,
-
// For legacy reasons this also implies a time discontinuity.
DISCONTINUITY_FORMATCHANGE =
DISCONTINUITY_AUDIO_FORMAT
@@ -76,6 +74,7 @@ struct ATSParser : public RefBase {
NUM_SOURCE_TYPES = 2
};
sp<MediaSource> getSource(SourceType type);
+ bool hasSource(SourceType type) const;
bool PTSTimeDeltaEstablished();
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index a03f6f9..c579d4c 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -265,8 +265,12 @@ void AnotherPacketSource::queueDiscontinuity(
mEOSResult = OK;
mLastQueuedTimeUs = 0;
mLatestEnqueuedMeta = NULL;
- ++mQueuedDiscontinuityCount;
+ if (type == ATSParser::DISCONTINUITY_NONE) {
+ return;
+ }
+
+ ++mQueuedDiscontinuityCount;
sp<ABuffer> buffer = new ABuffer(0);
buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
buffer->meta()->setMessage("extra", extra);
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 459591d..73fe109 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -170,8 +170,9 @@ static bool IsSeeminglyValidAC3Header(const uint8_t *ptr, size_t size) {
return parseAC3SyncFrame(ptr, size, NULL) > 0;
}
-static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
- if (size < 3) {
+static bool IsSeeminglyValidADTSHeader(
+ const uint8_t *ptr, size_t size, size_t *frameLength) {
+ if (size < 7) {
// Not enough data to verify header.
return false;
}
@@ -194,6 +195,13 @@ static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
return false;
}
+ size_t frameLengthInHeader =
+ ((ptr[3] & 3) << 11) + (ptr[4] << 3) + ((ptr[5] >> 5) & 7);
+ if (frameLengthInHeader > size) {
+ return false;
+ }
+
+ *frameLength = frameLengthInHeader;
return true;
}
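The extra check added above reads the 13-bit aac_frame_length field of the ADTS header, which spans the low 2 bits of byte 3, all 8 bits of byte 4 and the top 3 bits of byte 5. A worked example with illustrative header bytes:

    // ptr[3] = 0xFC (low 2 bits = 0), ptr[4] = 0x2E, ptr[5] = 0xE5 (top 3 bits = 7)
    size_t frameLengthInHeader = ((0xFC & 3) << 11) + (0x2E << 3) + ((0xE5 >> 5) & 7);
    // = 0 + 368 + 7 = 375 bytes; the header is rejected if this exceeds the bytes available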
@@ -315,8 +323,10 @@ status_t ElementaryStreamQueue::appendData(
}
#else
ssize_t startOffset = -1;
+ size_t frameLength;
for (size_t i = 0; i < size; ++i) {
- if (IsSeeminglyValidADTSHeader(&ptr[i], size - i)) {
+ if (IsSeeminglyValidADTSHeader(
+ &ptr[i], size - i, &frameLength)) {
startOffset = i;
break;
}
@@ -332,6 +342,12 @@ status_t ElementaryStreamQueue::appendData(
startOffset);
}
+ if (frameLength != size - startOffset) {
+ ALOGW("First ADTS AAC frame length is %zd bytes, "
+ "while the buffer size is %zd bytes.",
+ frameLength, size - startOffset);
+ }
+
data = &ptr[startOffset];
size -= startOffset;
#endif
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 3e70956..44c7edc 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -750,7 +750,7 @@ int GraphicBufferSource::findMatchingCodecBuffer_l(
}
// BufferQueue::ConsumerListener callback
-void GraphicBufferSource::onFrameAvailable() {
+void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock autoLock(mMutex);
ALOGV("onFrameAvailable exec=%d avail=%zu",
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index c0860ab..c8e3775 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -137,7 +137,7 @@ protected:
// into the codec buffer, and call Empty[This]Buffer. If we're not yet
// executing or there's no codec buffer available, we just increment
// mNumFramesAvailable and return.
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// BufferQueue::ConsumerListener interface, called when the client has
// released one or more GraphicBuffers. We clear out the appropriate
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 43f0bc9..f8d38ff 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -225,7 +225,7 @@ status_t OMX::allocateNode(
*node = 0;
- OMXNodeInstance *instance = new OMXNodeInstance(this, observer);
+ OMXNodeInstance *instance = new OMXNodeInstance(this, observer, name);
OMX_COMPONENTTYPE *handle;
OMX_ERRORTYPE err = mMaster->makeComponentInstance(
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index d07ec14..c04d95f 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -18,11 +18,15 @@
#define LOG_TAG "OMXNodeInstance"
#include <utils/Log.h>
+#include <inttypes.h>
+
#include "../include/OMXNodeInstance.h"
#include "OMXMaster.h"
#include "GraphicBufferSource.h"
#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include <OMX_AsString.h>
#include <binder/IMemory.h>
#include <gui/BufferQueue.h>
@@ -30,7 +34,68 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+
static const OMX_U32 kPortIndexInput = 0;
+static const OMX_U32 kPortIndexOutput = 1;
+
+#define CLOGW(fmt, ...) ALOGW("[%x:%s] " fmt, mNodeID, mName, ##__VA_ARGS__)
+
+#define CLOG_ERROR_IF(cond, fn, err, fmt, ...) \
+ ALOGE_IF(cond, #fn "(%x:%s, " fmt ") ERROR: %s(%#x)", \
+ mNodeID, mName, ##__VA_ARGS__, asString(err), err)
+#define CLOG_ERROR(fn, err, fmt, ...) CLOG_ERROR_IF(true, fn, err, fmt, ##__VA_ARGS__)
+#define CLOG_IF_ERROR(fn, err, fmt, ...) \
+ CLOG_ERROR_IF((err) != OMX_ErrorNone, fn, err, fmt, ##__VA_ARGS__)
+
+#define CLOGI_(level, fn, fmt, ...) \
+ ALOGI_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+#define CLOGD_(level, fn, fmt, ...) \
+ ALOGD_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+
+#define CLOG_LIFE(fn, fmt, ...) CLOGI_(ADebug::kDebugLifeCycle, fn, fmt, ##__VA_ARGS__)
+#define CLOG_STATE(fn, fmt, ...) CLOGI_(ADebug::kDebugState, fn, fmt, ##__VA_ARGS__)
+#define CLOG_CONFIG(fn, fmt, ...) CLOGI_(ADebug::kDebugConfig, fn, fmt, ##__VA_ARGS__)
+#define CLOG_INTERNAL(fn, fmt, ...) CLOGD_(ADebug::kDebugInternalState, fn, fmt, ##__VA_ARGS__)
+
+#define CLOG_DEBUG_IF(cond, fn, fmt, ...) \
+ ALOGD_IF(cond, #fn "(%x, " fmt ")", mNodeID, ##__VA_ARGS__)
+
+#define CLOG_BUFFER(fn, fmt, ...) \
+ CLOG_DEBUG_IF(DEBUG >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
+#define CLOG_BUMPED_BUFFER(fn, fmt, ...) \
+ CLOG_DEBUG_IF(DEBUG_BUMP >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
+
+/* buffer formatting */
+#define BUFFER_FMT(port, fmt, ...) "%s:%u " fmt, portString(port), (port), ##__VA_ARGS__
+#define NEW_BUFFER_FMT(buffer_id, port, fmt, ...) \
+ BUFFER_FMT(port, fmt ") (#%zu => %#x", ##__VA_ARGS__, mActiveBuffers.size(), (buffer_id))
+
+#define SIMPLE_BUFFER(port, size, data) BUFFER_FMT(port, "%zu@%p", (size), (data))
+#define SIMPLE_NEW_BUFFER(buffer_id, port, size, data) \
+ NEW_BUFFER_FMT(buffer_id, port, "%zu@%p", (size), (data))
+
+#define EMPTY_BUFFER(addr, header) "%#x [%u@%p]", \
+ (addr), (header)->nAllocLen, (header)->pBuffer
+#define FULL_BUFFER(addr, header) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld]", \
+ (intptr_t)(addr), (header)->nAllocLen, (header)->pBuffer, \
+ (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp
+
+#define WITH_STATS_WRAPPER(fmt, ...) fmt " { IN=%zu/%zu OUT=%zu/%zu }", ##__VA_ARGS__, \
+ mInputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexInput], \
+ mOutputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexOutput]
+// TRICKY: this is needed so formatting macros expand before substitution
+#define WITH_STATS(fmt, ...) WITH_STATS_WRAPPER(fmt, ##__VA_ARGS__)
+
+template<class T>
+static void InitOMXParams(T *params) {
+ memset(params, 0, sizeof(T));
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
namespace android {
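The InitOMXParams helper above captures the OMX convention of zero-initializing a parameter struct and stamping its nSize and nVersion (1.0.0.0) before any OMX_GetParameter/OMX_SetParameter call; the hand-rolled copies of that boilerplate are removed throughout the rest of this file. A minimal sketch of the pattern, mirroring the useGraphicBuffer2_l change further below:

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);                       // memset to 0, then set nSize and nVersion
    def.nPortIndex = kPortIndexInput;
    OMX_ERRORTYPE err = OMX_GetParameter(mHandle, OMX_IndexParamPortDefinition, &def);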
@@ -56,8 +121,8 @@ struct BufferMeta {
}
memcpy((OMX_U8 *)mMem->pointer() + header->nOffset,
- header->pBuffer + header->nOffset,
- header->nFilledLen);
+ header->pBuffer + header->nOffset,
+ header->nFilledLen);
}
void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) {
@@ -66,8 +131,8 @@ struct BufferMeta {
}
memcpy(header->pBuffer + header->nOffset,
- (const OMX_U8 *)mMem->pointer() + header->nOffset,
- header->nFilledLen);
+ (const OMX_U8 *)mMem->pointer() + header->nOffset,
+ header->nFilledLen);
}
void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {
@@ -89,8 +154,17 @@ OMX_CALLBACKTYPE OMXNodeInstance::kCallbacks = {
&OnEvent, &OnEmptyBufferDone, &OnFillBufferDone
};
+static inline const char *portString(OMX_U32 portIndex) {
+ switch (portIndex) {
+ case kPortIndexInput: return "Input";
+ case kPortIndexOutput: return "Output";
+ case ~0: return "All";
+ default: return "port";
+ }
+}
+
OMXNodeInstance::OMXNodeInstance(
- OMX *owner, const sp<IOMXObserver> &observer)
+ OMX *owner, const sp<IOMXObserver> &observer, const char *name)
: mOwner(owner),
mNodeID(0),
mHandle(NULL),
@@ -100,15 +174,25 @@ OMXNodeInstance::OMXNodeInstance(
, mBufferIDCount(0)
#endif
{
+ mName = ADebug::GetDebugName(name);
+ DEBUG = ADebug::GetDebugLevelFromProperty(name, "debug.stagefright.omx-debug");
+ ALOGV("debug level for %s is %d", name, DEBUG);
+ DEBUG_BUMP = DEBUG;
+ mNumPortBuffers[0] = 0;
+ mNumPortBuffers[1] = 0;
+ mDebugLevelBumpPendingBuffers[0] = 0;
+ mDebugLevelBumpPendingBuffers[1] = 0;
}
OMXNodeInstance::~OMXNodeInstance() {
+ free(mName);
CHECK(mHandle == NULL);
}
void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) {
- CHECK(mHandle == NULL);
mNodeID = node_id;
+ CLOG_LIFE(allocateNode, "handle=%p", handle);
+ CHECK(mHandle == NULL);
mHandle = handle;
}
@@ -120,6 +204,7 @@ sp<GraphicBufferSource> OMXNodeInstance::getGraphicBufferSource() {
void OMXNodeInstance::setGraphicBufferSource(
const sp<GraphicBufferSource>& bufferSource) {
Mutex::Autolock autoLock(mGraphicBufferSourceLock);
+ CLOG_INTERNAL(setGraphicBufferSource, "%p", bufferSource.get());
mGraphicBufferSource = bufferSource;
}
@@ -140,6 +225,7 @@ static status_t StatusFromOMXError(OMX_ERRORTYPE err) {
case OMX_ErrorNone:
return OK;
case OMX_ErrorUnsupportedSetting:
+ case OMX_ErrorUnsupportedIndex:
return ERROR_UNSUPPORTED;
default:
return UNKNOWN_ERROR;
@@ -147,8 +233,14 @@ static status_t StatusFromOMXError(OMX_ERRORTYPE err) {
}
status_t OMXNodeInstance::freeNode(OMXMaster *master) {
+ CLOG_LIFE(freeNode, "handle=%p", mHandle);
static int32_t kMaxNumIterations = 10;
+ // exit if we have already freed the node
+ if (mHandle == NULL) {
+ return OK;
+ }
+
// Transition the node from its current state all the way down
// to "Loaded".
// This ensures that all active buffers are properly freed even
@@ -170,10 +262,11 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
OMX_ERRORTYPE err;
int32_t iteration = 0;
while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
- && state != OMX_StateIdle
- && state != OMX_StateInvalid) {
+ && state != OMX_StateIdle
+ && state != OMX_StateInvalid) {
if (++iteration > kMaxNumIterations) {
- ALOGE("component failed to enter Idle state, aborting.");
+ CLOGW("failed to enter Idle state (now %s(%d)), aborting.",
+ asString(state), state);
state = OMX_StateInvalid;
break;
}
@@ -199,10 +292,11 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
OMX_ERRORTYPE err;
int32_t iteration = 0;
while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
- && state != OMX_StateLoaded
- && state != OMX_StateInvalid) {
+ && state != OMX_StateLoaded
+ && state != OMX_StateInvalid) {
if (++iteration > kMaxNumIterations) {
- ALOGE("component failed to enter Loaded state, aborting.");
+ CLOGW("failed to enter Loaded state (now %s(%d)), aborting.",
+ asString(state), state);
state = OMX_StateInvalid;
break;
}
@@ -220,20 +314,18 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
break;
default:
- CHECK(!"should not be here, unknown state.");
+ LOG_ALWAYS_FATAL("unknown state %s(%#x).", asString(state), state);
break;
}
- ALOGV("calling destroyComponentInstance");
+ ALOGV("[%x:%s] calling destroyComponentInstance", mNodeID, mName);
OMX_ERRORTYPE err = master->destroyComponentInstance(
static_cast<OMX_COMPONENTTYPE *>(mHandle));
- ALOGV("destroyComponentInstance returned err %d", err);
mHandle = NULL;
-
- if (err != OMX_ErrorNone) {
- ALOGE("FreeHandle FAILED with error 0x%08x.", err);
- }
+ CLOG_IF_ERROR(freeNode, err, "");
+ free(mName);
+ mName = NULL;
mOwner->invalidateNodeID(mNodeID);
mNodeID = 0;
@@ -265,7 +357,17 @@ status_t OMXNodeInstance::sendCommand(
Mutex::Autolock autoLock(mLock);
+ // bump internal-state debug level for 2 input and output frames past a command
+ {
+ Mutex::Autolock _l(mDebugLock);
+ bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+ }
+
+ const char *paramString =
+ cmd == OMX_CommandStateSet ? asString((OMX_STATETYPE)param) : portString(param);
+ CLOG_STATE(sendCommand, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL);
+ CLOG_IF_ERROR(sendCommand, err, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
return StatusFromOMXError(err);
}
@@ -274,17 +376,23 @@ status_t OMXNodeInstance::getParameter(
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params);
- ALOGE_IF(err != OMX_ErrorNone, "getParameter(%d) ERROR: %#x", index, err);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ // some errors are expected for getParameter
+ if (err != OMX_ErrorNoMore) {
+ CLOG_IF_ERROR(getParameter, err, "%s(%#x)", asString(extIndex), index);
+ }
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::setParameter(
- OMX_INDEXTYPE index, const void *params, size_t /* size */) {
+ OMX_INDEXTYPE index, const void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ CLOG_CONFIG(setParameter, "%s(%#x), %zu@%p)", asString(extIndex), index, size, params);
OMX_ERRORTYPE err = OMX_SetParameter(
mHandle, index, const_cast<void *>(params));
- ALOGE_IF(err != OMX_ErrorNone, "setParameter(%d) ERROR: %#x", index, err);
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x)", asString(extIndex), index);
return StatusFromOMXError(err);
}
@@ -293,16 +401,23 @@ status_t OMXNodeInstance::getConfig(
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetConfig(mHandle, index, params);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ // some errors are expected for getConfig
+ if (err != OMX_ErrorNoMore) {
+ CLOG_IF_ERROR(getConfig, err, "%s(%#x)", asString(extIndex), index);
+ }
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::setConfig(
- OMX_INDEXTYPE index, const void *params, size_t /* size */) {
+ OMX_INDEXTYPE index, const void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ CLOG_CONFIG(setConfig, "%s(%#x), %zu@%p)", asString(extIndex), index, size, params);
OMX_ERRORTYPE err = OMX_SetConfig(
mHandle, index, const_cast<void *>(params));
-
+ CLOG_IF_ERROR(setConfig, err, "%s(%#x)", asString(extIndex), index);
return StatusFromOMXError(err);
}
@@ -310,13 +425,14 @@ status_t OMXNodeInstance::getState(OMX_STATETYPE* state) {
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetState(mHandle, state);
-
+ CLOG_IF_ERROR(getState, err, "");
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::enableGraphicBuffers(
OMX_U32 portIndex, OMX_BOOL enable) {
Mutex::Autolock autoLock(mLock);
+ CLOG_CONFIG(enableGraphicBuffers, "%s:%u, %d", portString(portIndex), portIndex, enable);
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.enableAndroidNativeBuffers");
@@ -324,32 +440,19 @@ status_t OMXNodeInstance::enableGraphicBuffers(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- if (enable) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
- }
-
+ CLOG_ERROR_IF(enable, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
- OMX_VERSIONTYPE ver;
- ver.s.nVersionMajor = 1;
- ver.s.nVersionMinor = 0;
- ver.s.nRevision = 0;
- ver.s.nStep = 0;
- EnableAndroidNativeBuffersParams params = {
- sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
- };
+ EnableAndroidNativeBuffersParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+ params.enable = enable;
err = OMX_SetParameter(mHandle, index, &params);
-
- if (err != OMX_ErrorNone) {
- ALOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
- err, err);
-
- return UNKNOWN_ERROR;
- }
-
- return OK;
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d", name, index,
+ portString(portIndex), portIndex, enable);
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::getGraphicBufferUsage(
@@ -362,26 +465,19 @@ status_t OMXNodeInstance::getGraphicBufferUsage(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
-
+ CLOG_ERROR(getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
- OMX_VERSIONTYPE ver;
- ver.s.nVersionMajor = 1;
- ver.s.nVersionMinor = 0;
- ver.s.nRevision = 0;
- ver.s.nStep = 0;
- GetAndroidNativeBufferUsageParams params = {
- sizeof(GetAndroidNativeBufferUsageParams), ver, portIndex, 0,
- };
+ GetAndroidNativeBufferUsageParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
err = OMX_GetParameter(mHandle, index, &params);
-
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetAndroidNativeBufferUsage failed with error %d (0x%08x)",
- err, err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u", name, index,
+ portString(portIndex), portIndex);
+ return StatusFromOMXError(err);
}
*usage = params.nUsage;
@@ -393,6 +489,7 @@ status_t OMXNodeInstance::storeMetaDataInBuffers(
OMX_U32 portIndex,
OMX_BOOL enable) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u en:%d", portString(portIndex), portIndex, enable);
return storeMetaDataInBuffers_l(
portIndex, enable,
OMX_FALSE /* useGraphicBuffer */, NULL /* usingGraphicBufferInMetadata */);
@@ -419,37 +516,42 @@ status_t OMXNodeInstance::storeMetaDataInBuffers_l(
: OMX_ErrorBadParameter;
if (err == OMX_ErrorNone) {
*usingGraphicBufferInMetadata = OMX_TRUE;
+ name = graphicBufferName;
} else {
- *usingGraphicBufferInMetadata = OMX_FALSE;
err = OMX_GetExtensionIndex(mHandle, name, &index);
}
- if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
- return StatusFromOMXError(err);
- }
-
- StoreMetaDataInBuffersParams params;
- memset(&params, 0, sizeof(params));
- params.nSize = sizeof(params);
+ OMX_ERRORTYPE xerr = err;
+ if (err == OMX_ErrorNone) {
+ StoreMetaDataInBuffersParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
- // Version: 1.0.0.0
- params.nVersion.s.nVersionMajor = 1;
+ err = OMX_SetParameter(mHandle, index, &params);
+ }
- params.nPortIndex = portIndex;
- params.bStoreMetaData = enable;
- if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
- ALOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+ // don't log a loud error if the component does not support metadata mode on the output
+ if (err != OMX_ErrorNone) {
*usingGraphicBufferInMetadata = OMX_FALSE;
- return UNKNOWN_ERROR;
+ if (err == OMX_ErrorUnsupportedIndex && portIndex == kPortIndexOutput) {
+ CLOGW("component does not support metadata mode; using fallback");
+ } else if (xerr != OMX_ErrorNone) {
+ CLOG_ERROR(getExtensionIndex, xerr, "%s", name);
+ } else {
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d GB=%d", name, index,
+ portString(portIndex), portIndex, enable, useGraphicBuffer);
+ }
}
- return err;
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::prepareForAdaptivePlayback(
OMX_U32 portIndex, OMX_BOOL enable, OMX_U32 maxFrameWidth,
OMX_U32 maxFrameHeight) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(prepareForAdaptivePlayback, "%s:%u en=%d max=%ux%u",
+ portString(portIndex), portIndex, enable, maxFrameWidth, maxFrameHeight);
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
@@ -457,33 +559,29 @@ status_t OMXNodeInstance::prepareForAdaptivePlayback(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGW_IF(enable, "OMX_GetExtensionIndex %s failed", name);
+ CLOG_ERROR_IF(enable, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
PrepareForAdaptivePlaybackParams params;
- params.nSize = sizeof(params);
- params.nVersion.s.nVersionMajor = 1;
- params.nVersion.s.nVersionMinor = 0;
- params.nVersion.s.nRevision = 0;
- params.nVersion.s.nStep = 0;
-
+ InitOMXParams(&params);
params.nPortIndex = portIndex;
params.bEnable = enable;
params.nMaxFrameWidth = maxFrameWidth;
params.nMaxFrameHeight = maxFrameHeight;
- if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
- ALOGW("OMX_SetParameter failed for PrepareForAdaptivePlayback "
- "with error %d (0x%08x)", err, err);
- return UNKNOWN_ERROR;
- }
- return err;
+
+ err = OMX_SetParameter(mHandle, index, &params);
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d max=%ux%u", name, index,
+ portString(portIndex), portIndex, enable, maxFrameWidth, maxFrameHeight);
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::configureVideoTunnelMode(
OMX_U32 portIndex, OMX_BOOL tunneled, OMX_U32 audioHwSync,
native_handle_t **sidebandHandle) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(configureVideoTunnelMode, "%s:%u tun=%d sync=%u",
+ portString(portIndex), portIndex, tunneled, audioHwSync);
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
@@ -491,36 +589,33 @@ status_t OMXNodeInstance::configureVideoTunnelMode(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("configureVideoTunnelMode extension is missing!");
+ CLOG_ERROR_IF(tunneled, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
ConfigureVideoTunnelModeParams tunnelParams;
- tunnelParams.nSize = sizeof(tunnelParams);
- tunnelParams.nVersion.s.nVersionMajor = 1;
- tunnelParams.nVersion.s.nVersionMinor = 0;
- tunnelParams.nVersion.s.nRevision = 0;
- tunnelParams.nVersion.s.nStep = 0;
-
+ InitOMXParams(&tunnelParams);
tunnelParams.nPortIndex = portIndex;
tunnelParams.bTunneled = tunneled;
tunnelParams.nAudioHwSync = audioHwSync;
err = OMX_SetParameter(mHandle, index, &tunnelParams);
if (err != OMX_ErrorNone) {
- ALOGE("configureVideoTunnelMode failed! (err %d).", err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u tun=%d sync=%u", name, index,
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+ return StatusFromOMXError(err);
}
err = OMX_GetParameter(mHandle, index, &tunnelParams);
if (err != OMX_ErrorNone) {
- ALOGE("GetVideoTunnelWindow failed! (err %d).", err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u tun=%d sync=%u", name, index,
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+ return StatusFromOMXError(err);
}
if (sidebandHandle) {
*sidebandHandle = (native_handle_t*)tunnelParams.pSidebandWindow;
}
- return err;
+ return OK;
}
status_t OMXNodeInstance::useBuffer(
@@ -537,14 +632,14 @@ status_t OMXNodeInstance::useBuffer(
params->size(), static_cast<OMX_U8 *>(params->pointer()));
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+ CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
@@ -558,6 +653,8 @@ status_t OMXNodeInstance::useBuffer(
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT(
+ *buffer, portIndex, "%zu@%p", params->size(), params->pointer()));
return OK;
}
@@ -567,17 +664,14 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
// port definition
OMX_PARAM_PORTDEFINITIONTYPE def;
- def.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
- def.nVersion.s.nVersionMajor = 1;
- def.nVersion.s.nVersionMinor = 0;
- def.nVersion.s.nRevision = 0;
- def.nVersion.s.nStep = 0;
+ InitOMXParams(&def);
def.nPortIndex = portIndex;
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, OMX_IndexParamPortDefinition, &def);
- if (err != OMX_ErrorNone)
- {
- ALOGE("%s::%d:Error getting OMX_IndexParamPortDefinition", __FUNCTION__, __LINE__);
- return err;
+ if (err != OMX_ErrorNone) {
+ OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u",
+ asString(index), index, portString(portIndex), portIndex);
+ return UNKNOWN_ERROR;
}
BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
@@ -595,11 +689,11 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
bufferHandle);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+ CLOG_ERROR(useBuffer, err, BUFFER_FMT(portIndex, "%u@%p", def.nBufferSize, bufferHandle));
delete bufferMeta;
bufferMeta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pBuffer, bufferHandle);
@@ -608,7 +702,8 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
*buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
-
+ CLOG_BUFFER(useGraphicBuffer2, NEW_BUFFER_FMT(
+ *buffer, portIndex, "%u@%p", def.nBufferSize, bufferHandle));
return OK;
}
@@ -632,10 +727,8 @@ status_t OMXNodeInstance::useGraphicBuffer(
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.useAndroidNativeBuffer");
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
-
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
-
+ CLOG_ERROR(getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
@@ -656,15 +749,15 @@ status_t OMXNodeInstance::useGraphicBuffer(
err = OMX_SetParameter(mHandle, index, &params);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
- err);
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u meta=%p GB=%p", name, index,
+ portString(portIndex), portIndex, bufferMeta, graphicBuffer->handle);
delete bufferMeta;
bufferMeta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, bufferMeta);
@@ -672,12 +765,13 @@ status_t OMXNodeInstance::useGraphicBuffer(
*buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
-
+ CLOG_BUFFER(useGraphicBuffer, NEW_BUFFER_FMT(
+ *buffer, portIndex, "GB=%p", graphicBuffer->handle));
return OK;
}
status_t OMXNodeInstance::updateGraphicBufferInMeta(
- OMX_U32 /* portIndex */, const sp<GraphicBuffer>& graphicBuffer,
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
@@ -688,7 +782,8 @@ status_t OMXNodeInstance::updateGraphicBufferInMeta(
bufferMeta->setGraphicBuffer(graphicBuffer);
metadata->eType = kMetadataBufferTypeGrallocSource;
metadata->pHandle = graphicBuffer->handle;
-
+ CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x := %p",
+ portString(portIndex), portIndex, buffer, graphicBuffer->handle);
return OK;
}
@@ -714,19 +809,21 @@ status_t OMXNodeInstance::createInputSurface(
// Retrieve the width and height of the graphic buffer, set when the
// codec was configured.
OMX_PARAM_PORTDEFINITIONTYPE def;
- def.nSize = sizeof(def);
- def.nVersion.s.nVersionMajor = 1;
- def.nVersion.s.nVersionMinor = 0;
- def.nVersion.s.nRevision = 0;
- def.nVersion.s.nStep = 0;
+ InitOMXParams(&def);
def.nPortIndex = portIndex;
OMX_ERRORTYPE oerr = OMX_GetParameter(
mHandle, OMX_IndexParamPortDefinition, &def);
- CHECK(oerr == OMX_ErrorNone);
+ if (oerr != OMX_ErrorNone) {
+ OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
+ CLOG_ERROR(getParameter, oerr, "%s(%#x): %s:%u",
+ asString(index), index, portString(portIndex), portIndex);
+ return UNKNOWN_ERROR;
+ }
if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) {
- ALOGE("createInputSurface requires COLOR_FormatSurface "
- "(AndroidOpaque) color format");
+ CLOGW("createInputSurface requires COLOR_FormatSurface "
+ "(AndroidOpaque) color format instead of %s(%#x)",
+ asString(def.format.video.eColorFormat), def.format.video.eColorFormat);
return INVALID_OPERATION;
}
@@ -749,9 +846,9 @@ status_t OMXNodeInstance::signalEndOfInputStream() {
// flag set). Seems easier than doing the equivalent from here.
sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
if (bufferSource == NULL) {
- ALOGW("signalEndOfInputStream can only be used with Surface input");
+ CLOGW("signalEndOfInputStream can only be used with Surface input");
return INVALID_OPERATION;
- };
+ }
return bufferSource->signalEndOfInputStream();
}
@@ -768,14 +865,13 @@ status_t OMXNodeInstance::allocateBuffer(
mHandle, &header, portIndex, buffer_meta, size);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err);
-
+ CLOG_ERROR(allocateBuffer, err, BUFFER_FMT(portIndex, "%zu@", size));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
@@ -789,6 +885,7 @@ status_t OMXNodeInstance::allocateBuffer(
if (bufferSource != NULL && portIndex == kPortIndexInput) {
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(allocateBuffer, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p", size, *buffer_data));
return OK;
}
@@ -806,14 +903,14 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
mHandle, &header, portIndex, buffer_meta, params->size());
if (err != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err);
-
+ CLOG_ERROR(allocateBufferWithBackup, err,
+ SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
@@ -827,12 +924,16 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %p",
+ params->size(), params->pointer(), header->pBuffer));
+
return OK;
}
status_t OMXNodeInstance::freeBuffer(
OMX_U32 portIndex, OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
+ CLOG_BUFFER(freeBuffer, "%s:%u %#x", portString(portIndex), portIndex, buffer);
removeActiveBuffer(portIndex, buffer);
@@ -840,6 +941,7 @@ status_t OMXNodeInstance::freeBuffer(
BufferMeta *buffer_meta = static_cast<BufferMeta *>(header->pAppPrivate);
OMX_ERRORTYPE err = OMX_FreeBuffer(mHandle, portIndex, header);
+ CLOG_IF_ERROR(freeBuffer, err, "%s:%u %#x", portString(portIndex), portIndex, buffer);
delete buffer_meta;
buffer_meta = NULL;
@@ -856,8 +958,18 @@ status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) {
header->nOffset = 0;
header->nFlags = 0;
- OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.add(header);
+ CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header)));
+ }
+ OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header));
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.remove(header);
+ }
return StatusFromOMXError(err);
}
@@ -870,14 +982,66 @@ status_t OMXNodeInstance::emptyBuffer(
OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
header->nFilledLen = rangeLength;
header->nOffset = rangeOffset;
- header->nFlags = flags;
- header->nTimeStamp = timestamp;
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(header->pAppPrivate);
buffer_meta->CopyToOMX(header);
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer);
+}
+
+// log queued buffer activity for the next few input and/or output frames
+// if logging at internal state level
+void OMXNodeInstance::bumpDebugLevel_l(size_t numInputBuffers, size_t numOutputBuffers) {
+ if (DEBUG == ADebug::kDebugInternalState) {
+ DEBUG_BUMP = ADebug::kDebugAll;
+ if (numInputBuffers > 0) {
+ mDebugLevelBumpPendingBuffers[kPortIndexInput] = numInputBuffers;
+ }
+ if (numOutputBuffers > 0) {
+ mDebugLevelBumpPendingBuffers[kPortIndexOutput] = numOutputBuffers;
+ }
+ }
+}
+
+void OMXNodeInstance::unbumpDebugLevel_l(size_t portIndex) {
+ if (mDebugLevelBumpPendingBuffers[portIndex]) {
+ --mDebugLevelBumpPendingBuffers[portIndex];
+ }
+ if (!mDebugLevelBumpPendingBuffers[0]
+ && !mDebugLevelBumpPendingBuffers[1]) {
+ DEBUG_BUMP = DEBUG;
+ }
+}
+
+status_t OMXNodeInstance::emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr) {
+ header->nFlags = flags;
+ header->nTimeStamp = timestamp;
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mInputBuffersWithCodec.add(header);
+
+ // bump internal-state debug level for 2 input frames past a buffer with CSD
+ if ((flags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+ bumpDebugLevel_l(2 /* numInputBuffers */, 0 /* numOutputBuffers */);
+ }
+
+ CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header)));
+ }
+
OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
+ CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header));
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ if (err != OMX_ErrorNone) {
+ mInputBuffersWithCodec.remove(header);
+ } else if (!(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ unbumpDebugLevel_l(kPortIndexInput);
+ }
+ }
return StatusFromOMXError(err);
}
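The bump/unbump pair above is a simple per-port countdown: a codec-config buffer (or a flush/port-enable, see onEvent below) raises DEBUG_BUMP to kDebugAll and arms the counters, and each subsequently logged buffer on that port decrements its counter until both reach zero and the level falls back to DEBUG. A self-contained sketch of the same counter idea, with illustrative names in place of the real OMXNodeInstance members:

    #include <cstddef>

    // Minimal stand-in for the bumpDebugLevel_l / unbumpDebugLevel_l pair above.
    struct DebugBump {
        int base = 2;                 // stand-in for DEBUG
        int bumped = 4;               // stand-in for ADebug::kDebugAll
        int level = 2;                // stand-in for DEBUG_BUMP
        size_t pending[2] = {0, 0};   // per-port countdown: [input, output]

        void bump(size_t in, size_t out) {     // e.g. on a CSD buffer or a flush
            level = bumped;
            if (in)  pending[0] = in;
            if (out) pending[1] = out;
        }
        void unbump(size_t port) {             // e.g. per buffer logged on 'port'
            if (pending[port]) --pending[port];
            if (!pending[0] && !pending[1]) level = base;
        }
    };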
@@ -891,15 +1055,8 @@ status_t OMXNodeInstance::emptyDirectBuffer(
header->nFilledLen = rangeLength;
header->nOffset = rangeOffset;
- header->nFlags = flags;
- header->nTimeStamp = timestamp;
-
- OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
- if (err != OMX_ErrorNone) {
- ALOGW("emptyDirectBuffer failed, OMX err=0x%x", err);
- }
- return StatusFromOMXError(err);
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer);
}
status_t OMXNodeInstance::getExtensionIndex(
@@ -912,11 +1069,25 @@ status_t OMXNodeInstance::getExtensionIndex(
return StatusFromOMXError(err);
}
+inline static const char *asString(IOMX::InternalOptionType i, const char *def = "??") {
+ switch (i) {
+ case IOMX::INTERNAL_OPTION_SUSPEND: return "SUSPEND";
+ case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
+ return "REPEAT_PREVIOUS_FRAME_DELAY";
+ case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP";
+ case IOMX::INTERNAL_OPTION_START_TIME: return "START_TIME";
+ case IOMX::INTERNAL_OPTION_TIME_LAPSE: return "TIME_LAPSE";
+ default: return def;
+ }
+}
+
status_t OMXNodeInstance::setInternalOption(
OMX_U32 portIndex,
IOMX::InternalOptionType type,
const void *data,
size_t size) {
+ CLOG_CONFIG(setInternalOption, "%s(%d): %s:%u %zu@%p",
+ asString(type), type, portString(portIndex), portIndex, size, data);
switch (type) {
case IOMX::INTERNAL_OPTION_SUSPEND:
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
@@ -928,6 +1099,7 @@ status_t OMXNodeInstance::setInternalOption(
getGraphicBufferSource();
if (bufferSource == NULL || portIndex != kPortIndexInput) {
+ CLOGW("setInternalOption is only for Surface input");
return ERROR_UNSUPPORTED;
}
@@ -937,6 +1109,7 @@ status_t OMXNodeInstance::setInternalOption(
}
bool suspend = *(bool *)data;
+ CLOG_CONFIG(setInternalOption, "suspend=%d", suspend);
bufferSource->suspend(suspend);
} else if (type ==
IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY){
@@ -945,7 +1118,7 @@ status_t OMXNodeInstance::setInternalOption(
}
int64_t delayUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "delayUs=%lld", (long long)delayUs);
return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
} else if (type ==
IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP){
@@ -954,7 +1127,7 @@ status_t OMXNodeInstance::setInternalOption(
}
int64_t maxGapUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);
return bufferSource->setMaxTimestampGapUs(maxGapUs);
} else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
if (size != sizeof(int64_t)) {
@@ -962,13 +1135,18 @@ status_t OMXNodeInstance::setInternalOption(
}
int64_t skipFramesBeforeUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "beforeUs=%lld", (long long)skipFramesBeforeUs);
bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
} else { // IOMX::INTERNAL_OPTION_TIME_LAPSE
if (size != sizeof(int64_t) * 2) {
return INVALID_OPERATION;
}
+ int64_t timePerFrameUs = ((int64_t *)data)[0];
+ int64_t timePerCaptureUs = ((int64_t *)data)[1];
+ CLOG_CONFIG(setInternalOption, "perFrameUs=%lld perCaptureUs=%lld",
+ (long long)timePerFrameUs, (long long)timePerCaptureUs);
+
bufferSource->setTimeLapseUs((int64_t *)data);
}
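For INTERNAL_OPTION_TIME_LAPSE the payload checked above is two int64 values, time per encoded frame followed by time per capture, both in microseconds. A hedged caller-side sketch of packing that payload (the commented-out call is the assumed route through IOMX::setInternalOption, not a quoted signature):

    // Pack the TIME_LAPSE payload: {timePerFrameUs, timePerCaptureUs}.
    int64_t params[2];
    params[0] = 33333;     // ~30 fps spacing in the encoded stream
    params[1] = 1000000;   // capture one frame per second
    // omx->setInternalOption(node, kPortIndexInput,
    //         IOMX::INTERNAL_OPTION_TIME_LAPSE, params, sizeof(params));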
@@ -987,6 +1165,16 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {
OMX_BUFFERHEADERTYPE *buffer =
findBufferHeader(msg.u.extended_buffer_data.buffer);
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.remove(buffer);
+
+ CLOG_BUMPED_BUFFER(
+ FBD, WITH_STATS(FULL_BUFFER(msg.u.extended_buffer_data.buffer, buffer)));
+
+ unbumpDebugLevel_l(kPortIndexOutput);
+ }
+
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(buffer->pAppPrivate);
@@ -1002,16 +1190,23 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {
return;
}
} else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
+ OMX_BUFFERHEADERTYPE *buffer =
+ findBufferHeader(msg.u.buffer_data.buffer);
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mInputBuffersWithCodec.remove(buffer);
+
+ CLOG_BUMPED_BUFFER(
+ EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer)));
+ }
+
if (bufferSource != NULL) {
// This is one of the buffers used exclusively by
// GraphicBufferSource.
// Don't dispatch a message back to ACodec, since it doesn't
// know that anyone asked to have the buffer emptied and will
// be very confused.
-
- OMX_BUFFERHEADERTYPE *buffer =
- findBufferHeader(msg.u.buffer_data.buffer);
-
bufferSource->codecBufferEmptied(buffer);
return;
}
@@ -1035,6 +1230,43 @@ void OMXNodeInstance::onGetHandleFailed() {
// Don't try to acquire mLock here -- in rare circumstances this will hang.
void OMXNodeInstance::onEvent(
OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) {
+ const char *arg1String = "??";
+ const char *arg2String = "??";
+ ADebug::Level level = ADebug::kDebugInternalState;
+
+ switch (event) {
+ case OMX_EventCmdComplete:
+ arg1String = asString((OMX_COMMANDTYPE)arg1);
+ switch (arg1) {
+ case OMX_CommandStateSet:
+ arg2String = asString((OMX_STATETYPE)arg2);
+ level = ADebug::kDebugState;
+ break;
+ case OMX_CommandFlush:
+ case OMX_CommandPortEnable:
+ {
+ // bump internal-state debug level for 2 input and output frames
+ Mutex::Autolock _l(mDebugLock);
+ bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+ }
+ // fall through
+ default:
+ arg2String = portString(arg2);
+ }
+ break;
+ case OMX_EventError:
+ arg1String = asString((OMX_ERRORTYPE)arg1);
+ level = ADebug::kDebugLifeCycle;
+ break;
+ case OMX_EventPortSettingsChanged:
+ arg2String = asString((OMX_INDEXEXTTYPE)arg2);
+ // fall through
+ default:
+ arg1String = portString(arg1);
+ }
+
+ CLOGI_(level, onEvent, "%s(%x), %s(%x), %s(%x)",
+ asString(event), event, arg1String, arg1, arg2String, arg2);
const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
if (bufferSource != NULL
@@ -1092,23 +1324,27 @@ void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) {
active.mPortIndex = portIndex;
active.mID = id;
mActiveBuffers.push(active);
+
+ if (portIndex < NELEM(mNumPortBuffers)) {
+ ++mNumPortBuffers[portIndex];
+ }
}
void OMXNodeInstance::removeActiveBuffer(
OMX_U32 portIndex, OMX::buffer_id id) {
- bool found = false;
for (size_t i = 0; i < mActiveBuffers.size(); ++i) {
if (mActiveBuffers[i].mPortIndex == portIndex
- && mActiveBuffers[i].mID == id) {
- found = true;
+ && mActiveBuffers[i].mID == id) {
mActiveBuffers.removeItemsAt(i);
- break;
+
+ if (portIndex < NELEM(mNumPortBuffers)) {
+ --mNumPortBuffers[portIndex];
+ }
+ return;
}
}
- if (!found) {
- ALOGW("Attempt to remove an active buffer we know nothing about...");
- }
+ CLOGW("Attempt to remove an active buffer [%#x] we know nothing about...", id);
}
void OMXNodeInstance::freeActiveBuffers() {
@@ -1129,7 +1365,7 @@ OMX::buffer_id OMXNodeInstance::makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader)
OMX::buffer_id buffer;
do { // handle the very unlikely case of ID overflow
if (++mBufferIDCount == 0) {
- ++mBufferIDCount;
+ ++mBufferIDCount;
}
buffer = (OMX::buffer_id)mBufferIDCount;
} while (mBufferIDToBufferHeader.indexOfKey(buffer) >= 0);
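makeBufferID never hands out 0 (reserved for "no buffer") and re-probes on the rare counter wrap, so a 32-bit ID can always be mapped back to a live header. The same allocation loop as a self-contained sketch, with std::map standing in for the KeyedVector:

    #include <cstdint>
    #include <map>

    static uint32_t sIDCount = 0;
    static std::map<uint32_t, void *> sIDToHeader;  // stand-in for mBufferIDToBufferHeader

    uint32_t makeID(void *header) {
        uint32_t id;
        do {                        // handle the very unlikely case of ID overflow
            if (++sIDCount == 0) {
                ++sIDCount;         // 0 stays reserved
            }
            id = sIDCount;
        } while (sIDToHeader.count(id) > 0);
        sIDToHeader[id] = header;
        return id;
    }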
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 4999663..7f99dcd 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -152,28 +152,28 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::internalSetParameter(
OMX_PARAM_PORTDEFINITIONTYPE *defParams =
(OMX_PARAM_PORTDEFINITIONTYPE *)params;
- if (defParams->nPortIndex >= mPorts.size()
- || defParams->nSize
- != sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
- return OMX_ErrorUndefined;
+ if (defParams->nPortIndex >= mPorts.size()) {
+ return OMX_ErrorBadPortIndex;
+ }
+ if (defParams->nSize != sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
+ return OMX_ErrorUnsupportedSetting;
}
PortInfo *port =
&mPorts.editItemAt(defParams->nPortIndex);
- if (defParams->nBufferSize != port->mDef.nBufferSize) {
- CHECK_GE(defParams->nBufferSize, port->mDef.nBufferSize);
+ // default behavior is that we only allow buffer size to increase
+ if (defParams->nBufferSize > port->mDef.nBufferSize) {
port->mDef.nBufferSize = defParams->nBufferSize;
}
- if (defParams->nBufferCountActual
- != port->mDef.nBufferCountActual) {
- CHECK_GE(defParams->nBufferCountActual,
- port->mDef.nBufferCountMin);
-
- port->mDef.nBufferCountActual = defParams->nBufferCountActual;
+ if (defParams->nBufferCountActual < port->mDef.nBufferCountMin) {
+ ALOGW("component requires at least %u buffers (%u requested)",
+ port->mDef.nBufferCountMin, defParams->nBufferCountActual);
+ return OMX_ErrorUnsupportedSetting;
}
+ port->mDef.nBufferCountActual = defParams->nBufferCountActual;
return OMX_ErrorNone;
}
diff --git a/media/libstagefright/omx/SoftOMXComponent.cpp b/media/libstagefright/omx/SoftOMXComponent.cpp
index 646cd32..df978f8 100644
--- a/media/libstagefright/omx/SoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftOMXComponent.cpp
@@ -283,7 +283,7 @@ OMX_ERRORTYPE SoftOMXComponent::setConfig(
OMX_ERRORTYPE SoftOMXComponent::getExtensionIndex(
const char * /* name */, OMX_INDEXTYPE * /* index */) {
- return OMX_ErrorUndefined;
+ return OMX_ErrorUnsupportedIndex;
}
OMX_ERRORTYPE SoftOMXComponent::useBuffer(
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 2f83610..532cf2f 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -26,6 +26,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
namespace android {
@@ -61,6 +62,8 @@ SoftVideoDecoderOMXComponent::SoftVideoDecoderOMXComponent(
mCropWidth(width),
mCropHeight(height),
mOutputPortSettingsChange(NONE),
+ mMinInputBufferSize(384), // arbitrary, using one uncompressed macroblock
+ mMinCompressionRatio(1), // max input size is normally the output size
mComponentRole(componentRole),
mCodingType(codingType),
mProfileLevels(profileLevels),
@@ -71,7 +74,11 @@ void SoftVideoDecoderOMXComponent::initPorts(
OMX_U32 numInputBuffers,
OMX_U32 inputBufferSize,
OMX_U32 numOutputBuffers,
- const char *mimeType) {
+ const char *mimeType,
+ OMX_U32 minCompressionRatio) {
+ mMinInputBufferSize = inputBufferSize;
+ mMinCompressionRatio = minCompressionRatio;
+
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -120,27 +127,30 @@ void SoftVideoDecoderOMXComponent::initPorts(
addPort(def);
- updatePortDefinitions();
+ updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
}
-void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop) {
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def->nBufferSize = def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;
-
- def = &editPortInfo(kOutputPortIndex)->mDef;
- def->format.video.nFrameWidth = outputBufferWidth();
- def->format.video.nFrameHeight = outputBufferHeight();
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def->nBufferSize =
- (def->format.video.nFrameWidth *
- def->format.video.nFrameHeight * 3) / 2;
+void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
+ OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+ outDef->format.video.nFrameWidth = outputBufferWidth();
+ outDef->format.video.nFrameHeight = outputBufferHeight();
+ outDef->format.video.nStride = outDef->format.video.nFrameWidth;
+ outDef->format.video.nSliceHeight = outDef->format.video.nFrameHeight;
+
+ outDef->nBufferSize =
+ (outDef->format.video.nStride * outDef->format.video.nSliceHeight * 3) / 2;
+
+ OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+ inDef->format.video.nFrameWidth = mWidth;
+ inDef->format.video.nFrameHeight = mHeight;
+ // input port is compressed, hence it has no stride
+ inDef->format.video.nStride = 0;
+ inDef->format.video.nSliceHeight = 0;
+
+ // when output format changes, input buffer size does not actually change
+ if (updateInputSize) {
+ inDef->nBufferSize = max(outDef->nBufferSize / mMinCompressionRatio, mMinInputBufferSize);
+ }
if (updateCrop) {
mCropLeft = 0;
@@ -169,7 +179,8 @@ void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
bool strideChanged = false;
if (fakeStride) {
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
- if (def->format.video.nStride != width || def->format.video.nSliceHeight != height) {
+ if (def->format.video.nStride != (OMX_S32)width
+ || def->format.video.nSliceHeight != (OMX_U32)height) {
strideChanged = true;
}
}
@@ -252,7 +263,7 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
(OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
if (formatParams->nPortIndex > kMaxPortIndex) {
- return OMX_ErrorUndefined;
+ return OMX_ErrorBadPortIndex;
}
if (formatParams->nIndex != 0) {
@@ -324,13 +335,25 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
(OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
if (formatParams->nPortIndex > kMaxPortIndex) {
- return OMX_ErrorUndefined;
+ return OMX_ErrorBadPortIndex;
}
if (formatParams->nIndex != 0) {
return OMX_ErrorNoMore;
}
+ if (formatParams->nPortIndex == kInputPortIndex) {
+ if (formatParams->eCompressionFormat != mCodingType
+ || formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else {
+ if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused
+ || formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ }
+
return OMX_ErrorNone;
}
@@ -348,7 +371,7 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
mAdaptiveMaxWidth = 0;
mAdaptiveMaxHeight = 0;
}
- updatePortDefinitions();
+ updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
return OMX_ErrorNone;
}
@@ -369,11 +392,18 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
(mIsAdaptive && outputPort) ? mAdaptiveMaxWidth : newWidth;
def->format.video.nFrameHeight =
(mIsAdaptive && outputPort) ? mAdaptiveMaxHeight : newHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
- def->nBufferSize =
- def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;
if (outputPort) {
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+ def->nBufferSize =
+ def->format.video.nStride * def->format.video.nSliceHeight * 3 / 2;
+
+ OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+ // increase input buffer size if required
+ inDef->nBufferSize =
+ max(def->nBufferSize / mMinCompressionRatio, inDef->nBufferSize);
+
mWidth = newWidth;
mHeight = newHeight;
mCropLeft = 0;
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 70ec6e4..d4d6217 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -19,6 +19,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftVideoEncoderOMXComponent"
#include <utils/Log.h>
+#include <utils/misc.h>
#include "include/SoftVideoEncoderOMXComponent.h"
@@ -27,6 +28,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <ui/GraphicBuffer.h>
@@ -34,13 +36,316 @@
namespace android {
+const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
+ OMX_COLOR_FormatYUV420Planar,
+ OMX_COLOR_FormatYUV420SemiPlanar,
+ OMX_COLOR_FormatAndroidOpaque
+};
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
+ int32_t width,
+ int32_t height,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
: SimpleSoftOMXComponent(name, callbacks, appData, component),
- mGrallocModule(NULL) {
+ mInputDataIsMeta(false),
+ mWidth(width),
+ mHeight(height),
+ mBitrate(192000),
+ mFramerate(30 << 16), // Q16 format
+ mColorFormat(OMX_COLOR_FormatYUV420Planar),
+ mGrallocModule(NULL),
+ mMinOutputBufferSize(384), // arbitrary, using one uncompressed macroblock
+ mMinCompressionRatio(1), // max output size is normally the input size
+ mComponentRole(componentRole),
+ mCodingType(codingType),
+ mProfileLevels(profileLevels),
+ mNumProfileLevels(numProfileLevels) {
+}
+
+void SoftVideoEncoderOMXComponent::initPorts(
+ OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
+ const char *mime, OMX_U32 minCompressionRatio) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+
+ mMinOutputBufferSize = outputBufferSize;
+ mMinCompressionRatio = minCompressionRatio;
+
+ InitOMXParams(&def);
+
+ def.nPortIndex = kInputPortIndex;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = numInputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ // frameRate is in Q16 format.
+ def.format.video.xFramerate = mFramerate;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.nBufferAlignment = kInputBufferAlignment;
+ def.format.video.cMIMEType = const_cast<char *>("video/raw");
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ def.format.video.eColorFormat = mColorFormat;
+ def.format.video.pNativeWindow = NULL;
+    // buffer size is set in updatePortParams()
+
+ addPort(def);
+
+ InitOMXParams(&def);
+
+ def.nPortIndex = kOutputPortIndex;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = numOutputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = 0;
+ def.format.video.nSliceHeight = 0;
+ def.format.video.nBitrate = mBitrate;
+ def.format.video.xFramerate = 0 << 16;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.nBufferAlignment = kOutputBufferAlignment;
+ def.format.video.cMIMEType = const_cast<char *>(mime);
+ def.format.video.eCompressionFormat = mCodingType;
+ def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ def.format.video.pNativeWindow = NULL;
+    // buffer size is set in updatePortParams()
+
+ addPort(def);
+
+ updatePortParams();
+}
+
+void SoftVideoEncoderOMXComponent::updatePortParams() {
+ OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+ inDef->format.video.nFrameWidth = mWidth;
+ inDef->format.video.nFrameHeight = mHeight;
+ inDef->format.video.nStride = inDef->format.video.nFrameWidth;
+ inDef->format.video.nSliceHeight = inDef->format.video.nFrameHeight;
+ inDef->format.video.xFramerate = mFramerate;
+ inDef->format.video.eColorFormat = mColorFormat;
+ uint32_t rawBufferSize =
+ inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;
+ if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ inDef->nBufferSize = 4 + max(sizeof(buffer_handle_t), sizeof(GraphicBuffer *));
+ } else {
+ inDef->nBufferSize = rawBufferSize;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+ outDef->format.video.nFrameWidth = mWidth;
+ outDef->format.video.nFrameHeight = mHeight;
+ outDef->format.video.nBitrate = mBitrate;
+
+ outDef->nBufferSize = max(mMinOutputBufferSize, rawBufferSize / mMinCompressionRatio);
+}
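With the defaults above, the arithmetic works out as follows for an illustrative 1280x720 session (the resolution is an example, not a value from the patch):

    #include <algorithm>
    #include <cstdint>

    // Worked numbers for updatePortParams(): raw YUV420 size vs. output buffer size.
    uint32_t raw  = 1280 * 720 * 3 / 2;        // 1382400 bytes per input frame
    uint32_t out1 = std::max(384u, raw / 1);   // minCompressionRatio 1 -> 1382400
    uint32_t out2 = std::max(384u, raw / 2);   // minCompressionRatio 2 ->  691200

For the opaque/metadata input case the buffer carries only a small header plus a handle or pointer rather than pixel data, hence the 4 + max(sizeof(buffer_handle_t), sizeof(GraphicBuffer *)) size.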
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetPortParams(
+ const OMX_PARAM_PORTDEFINITIONTYPE *port) {
+ if (port->nPortIndex == kInputPortIndex) {
+ mWidth = port->format.video.nFrameWidth;
+ mHeight = port->format.video.nFrameHeight;
+
+        // xFramerate is in Q16 format, in frames per second
+ mFramerate = port->format.video.xFramerate;
+
+ if (port->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused
+ || (port->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar
+ && port->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar
+ && port->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ mColorFormat = port->format.video.eColorFormat;
+ } else if (port->nPortIndex == kOutputPortIndex) {
+ if (port->format.video.eCompressionFormat != mCodingType
+ || port->format.video.eColorFormat != OMX_COLOR_FormatUnused) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ mBitrate = port->format.video.nBitrate;
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+
+ updatePortParams();
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoErrorCorrection:
+ {
+ return OMX_ErrorNotImplemented;
+ }
+
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)param;
+
+ if (strncmp((const char *)roleParams->cRole,
+ mComponentRole,
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamPortDefinition:
+ {
+ OMX_ERRORTYPE err = internalSetPortParams((const OMX_PARAM_PORTDEFINITIONTYPE *)param);
+
+ if (err != OMX_ErrorNone) {
+ return err;
+ }
+
+ return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ const OMX_VIDEO_PARAM_PORTFORMATTYPE* format =
+ (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+ if (format->nPortIndex == kInputPortIndex) {
+ if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
+ format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ mColorFormat = format->eColorFormat;
+
+ updatePortParams();
+ return OMX_ErrorNone;
+ } else {
+ ALOGE("Unsupported color format %i", format->eColorFormat);
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else if (format->nPortIndex == kOutputPortIndex) {
+ if (format->eCompressionFormat == mCodingType) {
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+ }
+
+ case kStoreMetaDataExtensionIndex:
+ {
+ // storeMetaDataInBuffers
+ const StoreMetaDataInBuffersParams *storeParam =
+ (const StoreMetaDataInBuffersParams *)param;
+
+ if (storeParam->nPortIndex == kOutputPortIndex) {
+ return storeParam->bStoreMetaData ? OMX_ErrorUnsupportedSetting : OMX_ErrorNone;
+ } else if (storeParam->nPortIndex != kInputPortIndex) {
+ return OMX_ErrorBadPortIndex;
+ }
+
+ mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
+ if (mInputDataIsMeta) {
+ mColorFormat = OMX_COLOR_FormatAndroidOpaque;
+ } else if (mColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ mColorFormat = OMX_COLOR_FormatYUV420Planar;
+ }
+ updatePortParams();
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ }
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR param) {
+ switch (index) {
+ case OMX_IndexParamVideoErrorCorrection:
+ {
+ return OMX_ErrorNotImplemented;
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+ if (formatParams->nPortIndex == kInputPortIndex) {
+ if (formatParams->nIndex >= NELEM(kSupportedColorFormats)) {
+ return OMX_ErrorNoMore;
+ }
+
+ // Color formats, in order of preference
+ formatParams->eColorFormat = kSupportedColorFormats[formatParams->nIndex];
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ formatParams->xFramerate = mFramerate;
+ return OMX_ErrorNone;
+ } else if (formatParams->nPortIndex == kOutputPortIndex) {
+ formatParams->eCompressionFormat = mCodingType;
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+ }
+
+ case OMX_IndexParamVideoProfileLevelQuerySupported:
+ {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
+ (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) param;
+
+ if (profileLevel->nPortIndex != kOutputPortIndex) {
+ ALOGE("Invalid port index: %u", profileLevel->nPortIndex);
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (profileLevel->nProfileIndex >= mNumProfileLevels) {
+ return OMX_ErrorNoMore;
+ }
+
+ profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
+ profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, param);
+ }
}
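Input-port formats are reported one per nIndex, in order of preference, until OMX_ErrorNoMore. A minimal client-side sketch of that enumeration; 'component' is an assumed valid OMX handle, and error handling other than the terminating code is omitted:

    OMX_VIDEO_PARAM_PORTFORMATTYPE fmt;
    InitOMXParams(&fmt);
    fmt.nPortIndex = kInputPortIndex;
    for (fmt.nIndex = 0; ; ++fmt.nIndex) {
        OMX_ERRORTYPE err = OMX_GetParameter(
                component, OMX_IndexParamVideoPortFormat, &fmt);
        if (err != OMX_ErrorNone) {
            break;   // OMX_ErrorNoMore once kSupportedColorFormats is exhausted
        }
        // fmt.eColorFormat now holds the nIndex-th supported input color format
    }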
// static
diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp
index f2825dd..5c323c1 100644
--- a/media/libstagefright/tests/Utils_test.cpp
+++ b/media/libstagefright/tests/Utils_test.cpp
@@ -24,6 +24,7 @@
#include <unistd.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AStringUtils.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/Utils.h>
@@ -32,6 +33,100 @@ namespace android {
class UtilsTest : public ::testing::Test {
};
+TEST_F(UtilsTest, TestStringUtils) {
+ ASSERT_EQ(AStringUtils::Compare("Audio", "AudioExt", 5, false), 0);
+ ASSERT_EQ(AStringUtils::Compare("Audio", "audiOExt", 5, true), 0);
+ ASSERT_NE(AStringUtils::Compare("Audio", "audioExt", 5, false), 0);
+ ASSERT_NE(AStringUtils::Compare("Audio", "AudiOExt", 5, false), 0);
+
+ ASSERT_LT(AStringUtils::Compare("Audio", "AudioExt", 7, false), 0);
+ ASSERT_LT(AStringUtils::Compare("Audio", "audiOExt", 7, true), 0);
+
+ ASSERT_GT(AStringUtils::Compare("AudioExt", "Audio", 7, false), 0);
+ ASSERT_GT(AStringUtils::Compare("audiOext", "Audio", 7, true), 0);
+
+ ASSERT_LT(AStringUtils::Compare("Audio", "Video", 5, false), 0);
+ ASSERT_LT(AStringUtils::Compare("Audio1", "Audio2", 6, false), 0);
+ ASSERT_LT(AStringUtils::Compare("audio", "VIDEO", 5, true), 0);
+ ASSERT_LT(AStringUtils::Compare("audio1", "AUDIO2", 6, true), 0);
+
+ ASSERT_GT(AStringUtils::Compare("Video", "Audio", 5, false), 0);
+ ASSERT_GT(AStringUtils::Compare("Audio2", "Audio1", 6, false), 0);
+ ASSERT_GT(AStringUtils::Compare("VIDEO", "audio", 5, true), 0);
+ ASSERT_GT(AStringUtils::Compare("AUDIO2", "audio1", 6, true), 0);
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("AudioA", 5, "AudioB", 5, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 6, "AudioA", 5, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "AudioA", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "audiOB", 5, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("AudioA", 5, "audiOB", 5, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 6, "AudioA", 5, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "AudioA", 6, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 6, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 0, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 0, false));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*ring1", 5, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*ring2", 5, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring4", 5, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring5", 5, "StrinG8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring8", 5, "String8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring8", 5, "String8", 7, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*1", 4, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*2", 4, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*3", 4, "string8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*4", 4, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*5", 4, "AString8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*6", 4, "AString8", 7, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ng1", 6, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng2", 6, "string8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng3", 6, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng4", 6, "StriNg8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng5", 6, "StrinG8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ng6", 6, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng8", 6, "AString8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng1", 6, "String16", 7, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ing9", 7, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ringA", 8, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng8", 6, "AString8", 7, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng1", 6, "String16", 7, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ing9", 7, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ringA", 8, "String8", 6, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "bestrestroom", 9, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "bestrestrestroom", 13, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*str*stro", 8, "bestrestrestroom", 14, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str*1", 9, "bestrestrestroom", 14, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "beSTReSTRoom", 9, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "beSTRestreSTRoom", 13, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*str*stro", 8, "bestreSTReSTRoom", 14, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str*1", 9, "bestreSTReSTRoom", 14, true));
+}
+
+TEST_F(UtilsTest, TestDebug) {
+#define LVL(x) (ADebug::Level)(x)
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "", LVL(5)), LVL(5));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", " \t \n ", LVL(2)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "video", "\t\n 3 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo,2:vid*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "avideo", "\t\n 3 \t\n:\t\n avideo \t\n,\t\n 2 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "audio.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "video.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4));
+#undef LVL
+}
+
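The assertions imply a comma-separated list of level:glob entries, where a bare level applies to any tag and the last matching entry wins; a worked reading of one test string (semantics inferred from the tests above, not quoted from the ADebug implementation):

    // "4:*omx,3:*d*o*,2:audio*" evaluated against the tags used in TestDebug:
    //   "audio.omx"  matches *omx(4), *d*o*(3), audio*(2) -> last match wins -> 2
    //   "video.omx"  matches *omx(4), *d*o*(3)            -> 3
    //   "video"      matches *d*o*(3) only                -> 3
    //   "omx"        matches *omx(4) only                 -> 4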
TEST_F(UtilsTest, TestFourCC) {
ASSERT_EQ(FOURCC('s', 't', 'm' , 'u'), 'stmu');
}
@@ -77,6 +172,13 @@ TEST_F(UtilsTest, TestMathTemplates) {
ASSERT_EQ(divUp(12, 4), 3);
ASSERT_EQ(divUp(13, 4), 4);
+ ASSERT_EQ(align(11, 4), 12);
+ ASSERT_EQ(align(12, 4), 12);
+ ASSERT_EQ(align(13, 4), 16);
+ ASSERT_EQ(align(11, 8), 16);
+ ASSERT_EQ(align(11, 2), 12);
+ ASSERT_EQ(align(11, 1), 11);
+
ASSERT_EQ(abs(5L), 5L);
ASSERT_EQ(abs(-25), 25);
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index e6e19e3..052b700 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -51,104 +51,178 @@ void MtpDataPacket::setTransactionID(MtpTransactionID id) {
MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
}
-uint16_t MtpDataPacket::getUInt16() {
+bool MtpDataPacket::getUInt8(uint8_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
+ value = mBuffer[mOffset++];
+ return true;
+}
+
+bool MtpDataPacket::getUInt16(uint16_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
- mOffset += 2;
- return result;
+ value = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+ mOffset += sizeof(value);
+ return true;
}
-uint32_t MtpDataPacket::getUInt32() {
+bool MtpDataPacket::getUInt32(uint32_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+ value = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24);
- mOffset += 4;
- return result;
+ mOffset += sizeof(value);
+ return true;
}
-uint64_t MtpDataPacket::getUInt64() {
+bool MtpDataPacket::getUInt64(uint64_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+ value = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56);
- mOffset += 8;
- return result;
+ mOffset += sizeof(value);
+ return true;
}
-void MtpDataPacket::getUInt128(uint128_t& value) {
- value[0] = getUInt32();
- value[1] = getUInt32();
- value[2] = getUInt32();
- value[3] = getUInt32();
+bool MtpDataPacket::getUInt128(uint128_t& value) {
+ return getUInt32(value[0]) && getUInt32(value[1]) && getUInt32(value[2]) && getUInt32(value[3]);
}
-void MtpDataPacket::getString(MtpStringBuffer& string)
+bool MtpDataPacket::getString(MtpStringBuffer& string)
{
- string.readFromPacket(this);
+ return string.readFromPacket(this);
}
Int8List* MtpDataPacket::getAInt8() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int8List* result = new Int8List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt8());
+ for (uint32_t i = 0; i < count; i++) {
+ int8_t value;
+ if (!getInt8(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt8List* MtpDataPacket::getAUInt8() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt8List* result = new UInt8List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt8());
+ for (uint32_t i = 0; i < count; i++) {
+ uint8_t value;
+ if (!getUInt8(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int16List* MtpDataPacket::getAInt16() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int16List* result = new Int16List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt16());
+ for (uint32_t i = 0; i < count; i++) {
+ int16_t value;
+ if (!getInt16(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt16List* MtpDataPacket::getAUInt16() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt16List* result = new UInt16List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt16());
+ for (uint32_t i = 0; i < count; i++) {
+ uint16_t value;
+ if (!getUInt16(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int32List* MtpDataPacket::getAInt32() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int32List* result = new Int32List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt32());
+ for (uint32_t i = 0; i < count; i++) {
+ int32_t value;
+ if (!getInt32(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt32List* MtpDataPacket::getAUInt32() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt32List* result = new UInt32List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt32());
+ for (uint32_t i = 0; i < count; i++) {
+ uint32_t value;
+ if (!getUInt32(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int64List* MtpDataPacket::getAInt64() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int64List* result = new Int64List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt64());
+ for (uint32_t i = 0; i < count; i++) {
+ int64_t value;
+ if (!getInt64(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt64List* MtpDataPacket::getAUInt64() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt64List* result = new UInt64List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt64());
+ for (uint32_t i = 0; i < count; i++) {
+ uint64_t value;
+ if (!getUInt64(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
index 2b81063..13d3bd9 100644
--- a/media/mtp/MtpDataPacket.h
+++ b/media/mtp/MtpDataPacket.h
@@ -30,7 +30,7 @@ class MtpStringBuffer;
class MtpDataPacket : public MtpPacket {
private:
// current offset for get/put methods
- int mOffset;
+ size_t mOffset;
public:
MtpDataPacket();
@@ -42,17 +42,18 @@ public:
void setTransactionID(MtpTransactionID id);
inline const uint8_t* getData() const { return mBuffer + MTP_CONTAINER_HEADER_SIZE; }
- inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
- inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; }
- uint16_t getUInt16();
- inline int16_t getInt16() { return (int16_t)getUInt16(); }
- uint32_t getUInt32();
- inline int32_t getInt32() { return (int32_t)getUInt32(); }
- uint64_t getUInt64();
- inline int64_t getInt64() { return (int64_t)getUInt64(); }
- void getUInt128(uint128_t& value);
- inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
- void getString(MtpStringBuffer& string);
+
+ bool getUInt8(uint8_t& value);
+ inline bool getInt8(int8_t& value) { return getUInt8((uint8_t&)value); }
+ bool getUInt16(uint16_t& value);
+ inline bool getInt16(int16_t& value) { return getUInt16((uint16_t&)value); }
+ bool getUInt32(uint32_t& value);
+ inline bool getInt32(int32_t& value) { return getUInt32((uint32_t&)value); }
+ bool getUInt64(uint64_t& value);
+ inline bool getInt64(int64_t& value) { return getUInt64((uint64_t&)value); }
+ bool getUInt128(uint128_t& value);
+ inline bool getInt128(int128_t& value) { return getUInt128((uint128_t&)value); }
+ bool getString(MtpStringBuffer& string);
Int8List* getAInt8();
UInt8List* getAUInt8();
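With the getters now returning bool rather than a value, a caller is expected to stop parsing as soon as the packet runs short; a minimal sketch of the pattern the Mtp*Info and MtpProperty readers follow (the field names here are illustrative):

    // Illustrative consumer of the bounds-checked getters declared above.
    bool readTwoFields(MtpDataPacket &packet, uint16_t &version, uint32_t &extId) {
        if (!packet.getUInt16(version)) return false;   // packet truncated
        if (!packet.getUInt32(extId)) return false;     // packet truncated
        return true;
    }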
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
index 96331b5..3eafd6f 100644
--- a/media/mtp/MtpDevice.cpp
+++ b/media/mtp/MtpDevice.cpp
@@ -322,8 +322,10 @@ MtpDeviceInfo* MtpDevice::getDeviceInfo() {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpDeviceInfo* info = new MtpDeviceInfo;
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -355,8 +357,10 @@ MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpStorageInfo* info = new MtpStorageInfo(storageID);
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -394,8 +398,10 @@ MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpObjectInfo* info = new MtpObjectInfo(handle);
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -556,8 +562,10 @@ MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpProperty* property = new MtpProperty;
- property->read(mData);
- return property;
+ if (property->read(mData))
+ return property;
+ else
+ delete property;
}
return NULL;
}
@@ -575,15 +583,17 @@ MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectForma
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpProperty* property = new MtpProperty;
- property->read(mData);
- return property;
+ if (property->read(mData))
+ return property;
+ else
+ delete property;
}
return NULL;
}
bool MtpDevice::readObject(MtpObjectHandle handle,
bool (* callback)(void* data, int offset, int length, void* clientData),
- int objectSize, void* clientData) {
+ size_t objectSize, void* clientData) {
Mutex::Autolock autoLock(mMutex);
bool result = false;
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
index b69203e..9b0acbf 100644
--- a/media/mtp/MtpDevice.h
+++ b/media/mtp/MtpDevice.h
@@ -98,7 +98,7 @@ public:
bool readObject(MtpObjectHandle handle,
bool (* callback)(void* data, int offset,
int length, void* clientData),
- int objectSize, void* clientData);
+ size_t objectSize, void* clientData);
bool readObject(MtpObjectHandle handle, const char* destPath, int group,
int perm);
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
index 108e2b8..3e1dff7 100644
--- a/media/mtp/MtpDeviceInfo.cpp
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -28,7 +28,7 @@ MtpDeviceInfo::MtpDeviceInfo()
mVendorExtensionID(0),
mVendorExtensionVersion(0),
mVendorExtensionDesc(NULL),
- mFunctionalCode(0),
+ mFunctionalMode(0),
mOperations(NULL),
mEvents(NULL),
mDeviceProperties(NULL),
@@ -59,39 +59,46 @@ MtpDeviceInfo::~MtpDeviceInfo() {
free(mSerial);
}
-void MtpDeviceInfo::read(MtpDataPacket& packet) {
+bool MtpDeviceInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
// read the device info
- mStandardVersion = packet.getUInt16();
- mVendorExtensionID = packet.getUInt32();
- mVendorExtensionVersion = packet.getUInt16();
+ if (!packet.getUInt16(mStandardVersion)) return false;
+ if (!packet.getUInt32(mVendorExtensionID)) return false;
+ if (!packet.getUInt16(mVendorExtensionVersion)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVendorExtensionDesc = strdup((const char *)string);
- mFunctionalCode = packet.getUInt16();
+ if (!packet.getUInt16(mFunctionalMode)) return false;
mOperations = packet.getAUInt16();
+ if (!mOperations) return false;
mEvents = packet.getAUInt16();
+ if (!mEvents) return false;
mDeviceProperties = packet.getAUInt16();
+ if (!mDeviceProperties) return false;
mCaptureFormats = packet.getAUInt16();
+ if (!mCaptureFormats) return false;
mPlaybackFormats = packet.getAUInt16();
+    if (!mPlaybackFormats) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mManufacturer = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mModel = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVersion = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mSerial = strdup((const char *)string);
+
+ return true;
}
void MtpDeviceInfo::print() {
ALOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
- ALOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
- mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+ ALOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalMode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+ mVendorExtensionDesc, mFunctionalMode, mManufacturer, mModel, mVersion, mSerial);
}
} // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
index 2abaa10..bcda9a5 100644
--- a/media/mtp/MtpDeviceInfo.h
+++ b/media/mtp/MtpDeviceInfo.h
@@ -29,7 +29,7 @@ public:
uint32_t mVendorExtensionID;
uint16_t mVendorExtensionVersion;
char* mVendorExtensionDesc;
- uint16_t mFunctionalCode;
+ uint16_t mFunctionalMode;
UInt16List* mOperations;
UInt16List* mEvents;
MtpDevicePropertyList* mDeviceProperties;
@@ -44,7 +44,7 @@ public:
MtpDeviceInfo();
virtual ~MtpDeviceInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
index cd15343..0573104 100644
--- a/media/mtp/MtpObjectInfo.cpp
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -55,39 +55,41 @@ MtpObjectInfo::~MtpObjectInfo() {
free(mKeywords);
}
-void MtpObjectInfo::read(MtpDataPacket& packet) {
+bool MtpObjectInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
time_t time;
- mStorageID = packet.getUInt32();
- mFormat = packet.getUInt16();
- mProtectionStatus = packet.getUInt16();
- mCompressedSize = packet.getUInt32();
- mThumbFormat = packet.getUInt16();
- mThumbCompressedSize = packet.getUInt32();
- mThumbPixWidth = packet.getUInt32();
- mThumbPixHeight = packet.getUInt32();
- mImagePixWidth = packet.getUInt32();
- mImagePixHeight = packet.getUInt32();
- mImagePixDepth = packet.getUInt32();
- mParent = packet.getUInt32();
- mAssociationType = packet.getUInt16();
- mAssociationDesc = packet.getUInt32();
- mSequenceNumber = packet.getUInt32();
+ if (!packet.getUInt32(mStorageID)) return false;
+ if (!packet.getUInt16(mFormat)) return false;
+ if (!packet.getUInt16(mProtectionStatus)) return false;
+ if (!packet.getUInt32(mCompressedSize)) return false;
+ if (!packet.getUInt16(mThumbFormat)) return false;
+ if (!packet.getUInt32(mThumbCompressedSize)) return false;
+ if (!packet.getUInt32(mThumbPixWidth)) return false;
+ if (!packet.getUInt32(mThumbPixHeight)) return false;
+ if (!packet.getUInt32(mImagePixWidth)) return false;
+ if (!packet.getUInt32(mImagePixHeight)) return false;
+ if (!packet.getUInt32(mImagePixDepth)) return false;
+ if (!packet.getUInt32(mParent)) return false;
+ if (!packet.getUInt16(mAssociationType)) return false;
+ if (!packet.getUInt32(mAssociationDesc)) return false;
+ if (!packet.getUInt32(mSequenceNumber)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mName = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
mDateCreated = time;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
mDateModified = time;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mKeywords = strdup((const char *)string);
+
+ return true;
}
void MtpObjectInfo::print() {
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
index c7a449c..86780f1 100644
--- a/media/mtp/MtpObjectInfo.h
+++ b/media/mtp/MtpObjectInfo.h
@@ -50,7 +50,7 @@ public:
MtpObjectInfo(MtpObjectHandle handle);
virtual ~MtpObjectInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index dd07843..bab1335 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -52,7 +52,7 @@ void MtpPacket::reset() {
memset(mBuffer, 0, mBufferSize);
}
-void MtpPacket::allocate(int length) {
+void MtpPacket::allocate(size_t length) {
if (length > mBufferSize) {
int newLength = length + mAllocationIncrement;
mBuffer = (uint8_t *)realloc(mBuffer, newLength);
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
index 0ffb1d3..037722a 100644
--- a/media/mtp/MtpPacket.h
+++ b/media/mtp/MtpPacket.h
@@ -28,11 +28,11 @@ class MtpPacket {
protected:
uint8_t* mBuffer;
// current size of the buffer
- int mBufferSize;
+ size_t mBufferSize;
// number of bytes to add when resizing the buffer
- int mAllocationIncrement;
+ size_t mAllocationIncrement;
// size of the data in the packet
- int mPacketSize;
+ size_t mPacketSize;
public:
MtpPacket(int bufferSize);
@@ -41,7 +41,7 @@ public:
// sets packet size to the default container size and sets buffer to zero
virtual void reset();
- void allocate(int length);
+ void allocate(size_t length);
void dump();
void copyFrom(const MtpPacket& src);
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
index c500901..d58e2a4 100644
--- a/media/mtp/MtpProperty.cpp
+++ b/media/mtp/MtpProperty.cpp
@@ -106,15 +106,15 @@ MtpProperty::~MtpProperty() {
free(mMinimumValue.str);
free(mMaximumValue.str);
if (mDefaultArrayValues) {
- for (int i = 0; i < mDefaultArrayLength; i++)
+ for (uint32_t i = 0; i < mDefaultArrayLength; i++)
free(mDefaultArrayValues[i].str);
}
if (mCurrentArrayValues) {
- for (int i = 0; i < mCurrentArrayLength; i++)
+ for (uint32_t i = 0; i < mCurrentArrayLength; i++)
free(mCurrentArrayValues[i].str);
}
if (mEnumValues) {
- for (int i = 0; i < mEnumLength; i++)
+ for (uint16_t i = 0; i < mEnumLength; i++)
free(mEnumValues[i].str);
}
}
@@ -123,11 +123,14 @@ MtpProperty::~MtpProperty() {
delete[] mEnumValues;
}
-void MtpProperty::read(MtpDataPacket& packet) {
- mCode = packet.getUInt16();
+bool MtpProperty::read(MtpDataPacket& packet) {
+ uint8_t temp8;
+
+ if (!packet.getUInt16(mCode)) return false;
bool deviceProp = isDeviceProperty();
- mType = packet.getUInt16();
- mWriteable = (packet.getUInt8() == 1);
+ if (!packet.getUInt16(mType)) return false;
+ if (!packet.getUInt8(temp8)) return false;
+ mWriteable = (temp8 == 1);
switch (mType) {
case MTP_TYPE_AINT8:
case MTP_TYPE_AUINT8:
@@ -140,28 +143,36 @@ void MtpProperty::read(MtpDataPacket& packet) {
case MTP_TYPE_AINT128:
case MTP_TYPE_AUINT128:
mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
- if (deviceProp)
+ if (!mDefaultArrayValues) return false;
+ if (deviceProp) {
mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+ if (!mCurrentArrayValues) return false;
+ }
break;
default:
- readValue(packet, mDefaultValue);
- if (deviceProp)
- readValue(packet, mCurrentValue);
+ if (!readValue(packet, mDefaultValue)) return false;
+ if (deviceProp) {
+ if (!readValue(packet, mCurrentValue)) return false;
+ }
}
- if (!deviceProp)
- mGroupCode = packet.getUInt32();
- mFormFlag = packet.getUInt8();
+ if (!deviceProp) {
+ if (!packet.getUInt32(mGroupCode)) return false;
+ }
+ if (!packet.getUInt8(mFormFlag)) return false;
if (mFormFlag == kFormRange) {
- readValue(packet, mMinimumValue);
- readValue(packet, mMaximumValue);
- readValue(packet, mStepSize);
+ if (!readValue(packet, mMinimumValue)) return false;
+ if (!readValue(packet, mMaximumValue)) return false;
+ if (!readValue(packet, mStepSize)) return false;
} else if (mFormFlag == kFormEnum) {
- mEnumLength = packet.getUInt16();
+ if (!packet.getUInt16(mEnumLength)) return false;
mEnumValues = new MtpPropertyValue[mEnumLength];
- for (int i = 0; i < mEnumLength; i++)
- readValue(packet, mEnumValues[i]);
+ for (int i = 0; i < mEnumLength; i++) {
+ if (!readValue(packet, mEnumValues[i])) return false;
+ }
}
+
+ return true;
}
void MtpProperty::write(MtpDataPacket& packet) {
@@ -409,57 +420,59 @@ void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) {
}
}
-void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+bool MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
MtpStringBuffer stringBuffer;
switch (mType) {
case MTP_TYPE_INT8:
case MTP_TYPE_AINT8:
- value.u.i8 = packet.getInt8();
+ if (!packet.getInt8(value.u.i8)) return false;
break;
case MTP_TYPE_UINT8:
case MTP_TYPE_AUINT8:
- value.u.u8 = packet.getUInt8();
+ if (!packet.getUInt8(value.u.u8)) return false;
break;
case MTP_TYPE_INT16:
case MTP_TYPE_AINT16:
- value.u.i16 = packet.getInt16();
+ if (!packet.getInt16(value.u.i16)) return false;
break;
case MTP_TYPE_UINT16:
case MTP_TYPE_AUINT16:
- value.u.u16 = packet.getUInt16();
+ if (!packet.getUInt16(value.u.u16)) return false;
break;
case MTP_TYPE_INT32:
case MTP_TYPE_AINT32:
- value.u.i32 = packet.getInt32();
+ if (!packet.getInt32(value.u.i32)) return false;
break;
case MTP_TYPE_UINT32:
case MTP_TYPE_AUINT32:
- value.u.u32 = packet.getUInt32();
+ if (!packet.getUInt32(value.u.u32)) return false;
break;
case MTP_TYPE_INT64:
case MTP_TYPE_AINT64:
- value.u.i64 = packet.getInt64();
+ if (!packet.getInt64(value.u.i64)) return false;
break;
case MTP_TYPE_UINT64:
case MTP_TYPE_AUINT64:
- value.u.u64 = packet.getUInt64();
+ if (!packet.getUInt64(value.u.u64)) return false;
break;
case MTP_TYPE_INT128:
case MTP_TYPE_AINT128:
- packet.getInt128(value.u.i128);
+ if (!packet.getInt128(value.u.i128)) return false;
break;
case MTP_TYPE_UINT128:
case MTP_TYPE_AUINT128:
- packet.getUInt128(value.u.u128);
+ if (!packet.getUInt128(value.u.u128)) return false;
break;
case MTP_TYPE_STR:
- packet.getString(stringBuffer);
+ if (!packet.getString(stringBuffer)) return false;
value.str = strdup(stringBuffer);
break;
default:
ALOGE("unknown type %04X in MtpProperty::readValue", mType);
+ return false;
}
+ return true;
}
void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
@@ -517,8 +530,9 @@ void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
}
}
-MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
- length = packet.getUInt32();
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, uint32_t& length) {
+ if (!packet.getUInt32(length)) return NULL;
+
// Fail if resulting array is over 2GB. This is because the maximum array
// size may be less than SIZE_MAX on some platforms.
if ( CC_UNLIKELY(
@@ -528,14 +542,17 @@ MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& lengt
return NULL;
}
MtpPropertyValue* result = new MtpPropertyValue[length];
- for (int i = 0; i < length; i++)
- readValue(packet, result[i]);
+ for (uint32_t i = 0; i < length; i++)
+ if (!readValue(packet, result[i])) {
+ delete [] result;
+ return NULL;
+ }
return result;
}
-void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, uint32_t length) {
packet.putUInt32(length);
- for (int i = 0; i < length; i++)
+ for (uint32_t i = 0; i < length; i++)
writeValue(packet, values[i]);
}
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
index 06ca56e..2e2ead1 100644
--- a/media/mtp/MtpProperty.h
+++ b/media/mtp/MtpProperty.h
@@ -49,9 +49,9 @@ public:
MtpPropertyValue mCurrentValue;
// for array types
- int mDefaultArrayLength;
+ uint32_t mDefaultArrayLength;
MtpPropertyValue* mDefaultArrayValues;
- int mCurrentArrayLength;
+ uint32_t mCurrentArrayLength;
MtpPropertyValue* mCurrentArrayValues;
enum {
@@ -70,7 +70,7 @@ public:
MtpPropertyValue mStepSize;
// for enum form
- int mEnumLength;
+ uint16_t mEnumLength;
MtpPropertyValue* mEnumValues;
public:
@@ -83,7 +83,7 @@ public:
inline MtpPropertyCode getPropertyCode() const { return mCode; }
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void write(MtpDataPacket& packet);
void setDefaultValue(const uint16_t* string);
@@ -102,11 +102,11 @@ public:
}
private:
- void readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ bool readValue(MtpDataPacket& packet, MtpPropertyValue& value);
void writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
- MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length);
+ MtpPropertyValue* readArrayValues(MtpDataPacket& packet, uint32_t& length);
void writeArrayValues(MtpDataPacket& packet,
- MtpPropertyValue* values, int length);
+ MtpPropertyValue* values, uint32_t length);
};
}; // namespace android
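The hunks above convert MtpProperty::read(), readValue() and readArrayValues() from void to bool so that a short or malformed packet aborts parsing instead of silently leaving fields with stale or garbage values. A minimal caller-side sketch of the new contract (the surrounding packet-handling code is assumed for illustration and is not part of this diff):

    // Hypothetical caller: parse a property description received from the peer.
    MtpProperty* property = new MtpProperty;
    if (!property->read(packet)) {
        // read() now reports truncated or invalid data; drop the partial object.
        delete property;
        return NULL;
    }
    return property;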
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
index 0e58e01..40b11b0 100644
--- a/media/mtp/MtpRequestPacket.cpp
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -27,7 +27,8 @@
namespace android {
MtpRequestPacket::MtpRequestPacket()
- : MtpPacket(512)
+ : MtpPacket(512),
+ mParameterCount(0)
{
}
@@ -37,10 +38,21 @@ MtpRequestPacket::~MtpRequestPacket() {
#ifdef MTP_DEVICE
int MtpRequestPacket::read(int fd) {
int ret = ::read(fd, mBuffer, mBufferSize);
- if (ret >= 0)
+ if (ret < 0) {
+ // file read error
+ return ret;
+ }
+
+ // request packet should have a 12-byte header followed by 0 to 5 32-bit arguments
+ if (ret >= MTP_CONTAINER_HEADER_SIZE
+ && ret <= MTP_CONTAINER_HEADER_SIZE + 5 * sizeof(uint32_t)
+ && ((ret - MTP_CONTAINER_HEADER_SIZE) & 3) == 0) {
mPacketSize = ret;
- else
- mPacketSize = 0;
+ mParameterCount = (ret - MTP_CONTAINER_HEADER_SIZE) / sizeof(uint32_t);
+ } else {
+ ALOGE("Malformed MTP request packet");
+ ret = -1;
+ }
return ret;
}
#endif
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
index 1201f11..79b798d 100644
--- a/media/mtp/MtpRequestPacket.h
+++ b/media/mtp/MtpRequestPacket.h
@@ -43,6 +43,10 @@ public:
inline MtpOperationCode getOperationCode() const { return getContainerCode(); }
inline void setOperationCode(MtpOperationCode code)
{ return setContainerCode(code); }
+ inline int getParameterCount() const { return mParameterCount; }
+
+private:
+ int mParameterCount;
};
}; // namespace android
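Since every MTP request parameter is a 32-bit value following the fixed container header, the new mParameterCount falls directly out of the number of bytes read. A worked example, assuming the usual 12-byte MTP_CONTAINER_HEADER_SIZE:

    // GetObjectHandles carries a storage ID, a format code and a parent handle:
    //   bytes read      = 12 (header) + 3 * sizeof(uint32_t) = 24
    //   mParameterCount = (24 - 12) / sizeof(uint32_t)       = 3
    // A 14-byte read fails the ((ret - 12) & 3) == 0 alignment check and is
    // rejected as a malformed packet.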
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index aa43967..931a09d 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -495,6 +495,9 @@ MtpResponseCode MtpServer::doOpenSession() {
mResponse.setParameter(1, mSessionID);
return MTP_RESPONSE_SESSION_ALREADY_OPEN;
}
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
mSessionID = mRequest.getParameter(1);
mSessionOpen = true;
@@ -529,6 +532,9 @@ MtpResponseCode MtpServer::doGetStorageInfo() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
MtpStorageID id = mRequest.getParameter(1);
MtpStorage* storage = getStorage(id);
if (!storage)
@@ -550,6 +556,8 @@ MtpResponseCode MtpServer::doGetStorageInfo() {
MtpResponseCode MtpServer::doGetObjectPropsSupported() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectFormat format = mRequest.getParameter(1);
MtpObjectPropertyList* properties = mDatabase->getSupportedObjectProperties(format);
mData.putAUInt16(properties);
@@ -560,6 +568,8 @@ MtpResponseCode MtpServer::doGetObjectPropsSupported() {
MtpResponseCode MtpServer::doGetObjectHandles() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
@@ -577,6 +587,8 @@ MtpResponseCode MtpServer::doGetObjectHandles() {
MtpResponseCode MtpServer::doGetNumObjects() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
@@ -599,6 +611,8 @@ MtpResponseCode MtpServer::doGetObjectReferences() {
return MTP_RESPONSE_SESSION_NOT_OPEN;
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
// FIXME - check for invalid object handle
@@ -617,9 +631,13 @@ MtpResponseCode MtpServer::doSetObjectReferences() {
return MTP_RESPONSE_SESSION_NOT_OPEN;
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID handle = mRequest.getParameter(1);
MtpObjectHandleList* references = mData.getAUInt32();
+ if (!references)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
delete references;
return result;
@@ -628,6 +646,8 @@ MtpResponseCode MtpServer::doSetObjectReferences() {
MtpResponseCode MtpServer::doGetObjectPropValue() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectProperty property = mRequest.getParameter(2);
ALOGV("GetObjectPropValue %d %s\n", handle,
@@ -639,6 +659,8 @@ MtpResponseCode MtpServer::doGetObjectPropValue() {
MtpResponseCode MtpServer::doSetObjectPropValue() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectProperty property = mRequest.getParameter(2);
ALOGV("SetObjectPropValue %d %s\n", handle,
@@ -648,6 +670,8 @@ MtpResponseCode MtpServer::doSetObjectPropValue() {
}
MtpResponseCode MtpServer::doGetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("GetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -656,6 +680,8 @@ MtpResponseCode MtpServer::doGetDevicePropValue() {
}
MtpResponseCode MtpServer::doSetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("SetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -664,6 +690,8 @@ MtpResponseCode MtpServer::doSetDevicePropValue() {
}
MtpResponseCode MtpServer::doResetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("ResetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -674,6 +702,8 @@ MtpResponseCode MtpServer::doResetDevicePropValue() {
MtpResponseCode MtpServer::doGetObjectPropList() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 5)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
// use uint32_t so we can support 0xFFFFFFFF
@@ -691,6 +721,8 @@ MtpResponseCode MtpServer::doGetObjectPropList() {
MtpResponseCode MtpServer::doGetObjectInfo() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectInfo info(handle);
MtpResponseCode result = mDatabase->getObjectInfo(handle, info);
@@ -732,6 +764,8 @@ MtpResponseCode MtpServer::doGetObjectInfo() {
MtpResponseCode MtpServer::doGetObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpString pathBuf;
int64_t fileLength;
@@ -765,6 +799,8 @@ MtpResponseCode MtpServer::doGetObject() {
}
MtpResponseCode MtpServer::doGetThumb() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
size_t thumbSize;
void* thumb = mDatabase->getThumbnail(handle, thumbSize);
@@ -783,6 +819,8 @@ MtpResponseCode MtpServer::doGetThumb() {
MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
uint64_t offset;
uint32_t length;
@@ -832,6 +870,11 @@ MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
MtpResponseCode MtpServer::doSendObjectInfo() {
MtpString path;
+ uint16_t temp16;
+ uint32_t temp32;
+
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1);
MtpStorage* storage = getStorage(storageID);
MtpObjectHandle parent = mRequest.getParameter(2);
@@ -853,25 +896,29 @@ MtpResponseCode MtpServer::doSendObjectInfo() {
}
// read only the fields we need
- mData.getUInt32(); // storage ID
- MtpObjectFormat format = mData.getUInt16();
- mData.getUInt16(); // protection status
- mSendObjectFileSize = mData.getUInt32();
- mData.getUInt16(); // thumb format
- mData.getUInt32(); // thumb compressed size
- mData.getUInt32(); // thumb pix width
- mData.getUInt32(); // thumb pix height
- mData.getUInt32(); // image pix width
- mData.getUInt32(); // image pix height
- mData.getUInt32(); // image bit depth
- mData.getUInt32(); // parent
- uint16_t associationType = mData.getUInt16();
- uint32_t associationDesc = mData.getUInt32(); // association desc
- mData.getUInt32(); // sequence number
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // storage ID
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER;
+ MtpObjectFormat format = temp16;
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER; // protection status
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER;
+ mSendObjectFileSize = temp32;
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb format
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb compressed size
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb pix width
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb pix height
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image pix width
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image pix height
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image bit depth
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // parent
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER;
+ uint16_t associationType = temp16;
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER;
+ uint32_t associationDesc = temp32; // association desc
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // sequence number
MtpStringBuffer name, created, modified;
- mData.getString(name); // file name
- mData.getString(created); // date created
- mData.getString(modified); // date modified
+ if (!mData.getString(name)) return MTP_RESPONSE_INVALID_PARAMETER; // file name
+ if (!mData.getString(created)) return MTP_RESPONSE_INVALID_PARAMETER; // date created
+ if (!mData.getString(modified)) return MTP_RESPONSE_INVALID_PARAMETER; // date modified
// keywords follow
ALOGV("name: %s format: %04X\n", (const char *)name, format);
@@ -1066,6 +1113,8 @@ static void deletePath(const char* path) {
MtpResponseCode MtpServer::doDeleteObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectFormat format = mRequest.getParameter(2);
// FIXME - support deleting all objects if handle is 0xFFFFFFFF
@@ -1087,6 +1136,8 @@ MtpResponseCode MtpServer::doDeleteObject() {
}
MtpResponseCode MtpServer::doGetObjectPropDesc() {
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectProperty propCode = mRequest.getParameter(1);
MtpObjectFormat format = mRequest.getParameter(2);
ALOGV("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
@@ -1100,6 +1151,8 @@ MtpResponseCode MtpServer::doGetObjectPropDesc() {
}
MtpResponseCode MtpServer::doGetDevicePropDesc() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty propCode = mRequest.getParameter(1);
ALOGV("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
@@ -1113,6 +1166,8 @@ MtpResponseCode MtpServer::doGetDevicePropDesc() {
MtpResponseCode MtpServer::doSendPartialObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
uint64_t offset = mRequest.getParameter(2);
uint64_t offset2 = mRequest.getParameter(3);
@@ -1180,6 +1235,8 @@ MtpResponseCode MtpServer::doSendPartialObject() {
}
MtpResponseCode MtpServer::doTruncateObject() {
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
ObjectEdit* edit = getEditObject(handle);
if (!edit) {
@@ -1199,6 +1256,8 @@ MtpResponseCode MtpServer::doTruncateObject() {
}
MtpResponseCode MtpServer::doBeginEditObject() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
if (getEditObject(handle)) {
ALOGE("object already open for edit in doBeginEditObject");
@@ -1223,6 +1282,8 @@ MtpResponseCode MtpServer::doBeginEditObject() {
}
MtpResponseCode MtpServer::doEndEditObject() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
ObjectEdit* edit = getEditObject(handle);
if (!edit) {
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
index 2b1a9ae..5d4ebbf 100644
--- a/media/mtp/MtpStorageInfo.cpp
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -45,21 +45,23 @@ MtpStorageInfo::~MtpStorageInfo() {
free(mVolumeIdentifier);
}
-void MtpStorageInfo::read(MtpDataPacket& packet) {
+bool MtpStorageInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
// read the device info
- mStorageType = packet.getUInt16();
- mFileSystemType = packet.getUInt16();
- mAccessCapability = packet.getUInt16();
- mMaxCapacity = packet.getUInt64();
- mFreeSpaceBytes = packet.getUInt64();
- mFreeSpaceObjects = packet.getUInt32();
+ if (!packet.getUInt16(mStorageType)) return false;
+ if (!packet.getUInt16(mFileSystemType)) return false;
+ if (!packet.getUInt16(mAccessCapability)) return false;
+ if (!packet.getUInt64(mMaxCapacity)) return false;
+ if (!packet.getUInt64(mFreeSpaceBytes)) return false;
+ if (!packet.getUInt32(mFreeSpaceObjects)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mStorageDescription = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVolumeIdentifier = strdup((const char *)string);
+
+ return true;
}
void MtpStorageInfo::print() {
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
index 2cb626e..35a8189 100644
--- a/media/mtp/MtpStorageInfo.h
+++ b/media/mtp/MtpStorageInfo.h
@@ -39,7 +39,7 @@ public:
MtpStorageInfo(MtpStorageID id);
virtual ~MtpStorageInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
index f3420a4..df04694 100644
--- a/media/mtp/MtpStringBuffer.cpp
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -123,11 +123,17 @@ void MtpStringBuffer::set(const uint16_t* src) {
mByteCount = dest - mBuffer;
}
-void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
- int count = packet->getUInt8();
+bool MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+ uint8_t count;
+ if (!packet->getUInt8(count))
+ return false;
+
uint8_t* dest = mBuffer;
for (int i = 0; i < count; i++) {
- uint16_t ch = packet->getUInt16();
+ uint16_t ch;
+
+ if (!packet->getUInt16(ch))
+ return false;
if (ch >= 0x0800) {
*dest++ = (uint8_t)(0xE0 | (ch >> 12));
*dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
@@ -142,6 +148,7 @@ void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
*dest++ = 0;
mCharCount = count;
mByteCount = dest - mBuffer;
+ return true;
}
void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
index e5150df..85d91e8 100644
--- a/media/mtp/MtpStringBuffer.h
+++ b/media/mtp/MtpStringBuffer.h
@@ -46,7 +46,7 @@ public:
void set(const char* src);
void set(const uint16_t* src);
- void readFromPacket(MtpDataPacket* packet);
+ bool readFromPacket(MtpDataPacket* packet);
void writeToPacket(MtpDataPacket* packet) const;
inline int getCharCount() const { return mCharCount; }
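readFromPacket() expands each 16-bit code unit into up to three UTF-8 bytes, which is why the new early returns matter: stopping at the first failed getUInt16() avoids emitting output for data that was never received. The encoding rule itself, as a standalone illustrative sketch (BMP code units only, no surrogate handling, mirroring the branch visible above):

    // Encode one UTF-16 code unit into UTF-8; returns the number of bytes written.
    static size_t encodeUtf8(uint16_t ch, uint8_t* dest) {
        if (ch >= 0x0800) {                         // three bytes, e.g. U+20AC -> E2 82 AC
            dest[0] = (uint8_t)(0xE0 | (ch >> 12));
            dest[1] = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
            dest[2] = (uint8_t)(0x80 | (ch & 0x3F));
            return 3;
        } else if (ch >= 0x80) {                    // two bytes
            dest[0] = (uint8_t)(0xC0 | (ch >> 6));
            dest[1] = (uint8_t)(0x80 | (ch & 0x3F));
            return 2;
        }
        dest[0] = (uint8_t)ch;                      // one byte, plain ASCII
        return 1;
    }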
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 970a43c..db57d0b 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -256,18 +256,23 @@ PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor *ex) {
len -= datalen;
}
- // there are <numentries> in the buffer, we need
- // (source buffer size) + 4 + (4 * numentries) bytes for the PsshInfo structure
- size_t newsize = buffer->size() + 4 + (4 * numentries);
+ // there are <numentries> entries in the source buffer; we need
+ // (source buffer size) - (sizeof(uint32_t) * numentries) + sizeof(size_t)
+ // + ((sizeof(void*) + sizeof(size_t)) * numentries) bytes for the PsshInfo structure
+ // Or in other words, the data lengths in the source structure are replaced by size_t
+ // (which may be the same size or larger on 64-bit builds), and in addition there is an
+ // extra pointer for each entry, and an extra size_t for the entire PsshInfo.
+ size_t newsize = buffer->size() - (sizeof(uint32_t) * numentries) + sizeof(size_t)
+ + ((sizeof(void*) + sizeof(size_t)) * numentries);
ex->mPsshBuf = new ABuffer(newsize);
ex->mPsshBuf->setRange(0, newsize);
// copy data
const uint8_t* src = buffer->data();
uint8_t* dst = ex->mPsshBuf->data();
- uint8_t* dstdata = dst + 4 + numentries * sizeof(PsshEntry);
- *((uint32_t*)dst) = numentries;
- dst += 4;
+ uint8_t* dstdata = dst + sizeof(size_t) + numentries * sizeof(PsshEntry);
+ *((size_t*)dst) = numentries;
+ dst += sizeof(size_t);
for (size_t i = 0; i < numentries; i++) {
// copy uuid
memcpy(dst, src, 16);
@@ -276,14 +281,14 @@ PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor *ex) {
// get/copy data length
uint32_t datalen = *((uint32_t*)src);
- memcpy(dst, src, 4);
- src += 4;
- dst += 4;
+ *((size_t*)dst) = datalen;
+ src += sizeof(uint32_t);
+ dst += sizeof(size_t);
// the next entry in the destination is a pointer to the actual data, which we store
// after the array of PsshEntry
- memcpy(dst, &dstdata, sizeof(dstdata));
- dst += 4;
+ *((void**)dst) = dstdata;
+ dst += sizeof(void*);
// copy the actual data
memcpy(dstdata, src, datalen);
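To make the newsize comment concrete, here is the arithmetic for a hypothetical buffer holding 2 PSSH entries of 32 data bytes each on a 64-bit build (sizeof(size_t) == sizeof(void*) == 8):

    //   source buffer = 2 * (16-byte UUID + 4-byte length + 32 data bytes)    = 104 bytes
    //   newsize       = 104 - (4 * 2) + 8 + ((8 + 8) * 2)                     = 136 bytes
    //   destination   = 8 (size_t count) + 2 * 32 (PsshEntry) + 2 * 32 (data) = 136 bytes
    // so the count and the per-entry pointer/length fields line up with the
    // PsshInfo/PsshEntry structs regardless of the platform's word size.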