-rw-r--r--  camera/VendorTagDescriptor.cpp | 2
-rw-r--r--  cmds/screenrecord/FrameOutput.cpp | 2
-rw-r--r--  cmds/screenrecord/FrameOutput.h | 2
-rw-r--r--  cmds/screenrecord/Overlay.cpp | 2
-rw-r--r--  cmds/screenrecord/Overlay.h | 2
-rw-r--r--  drm/mediadrm/plugins/clearkey/DrmPlugin.h | 10
-rw-r--r--  drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 25
-rw-r--r--  drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 2
-rw-r--r--  include/camera/ProCamera.h | 2
-rw-r--r--  include/media/AudioEffect.h | 9
-rw-r--r--  include/media/AudioSystem.h | 9
-rw-r--r--  include/media/ICrypto.h | 3
-rw-r--r--  include/media/IDrm.h | 3
-rw-r--r--  include/media/MediaPlayerInterface.h | 11
-rw-r--r--  include/media/stagefright/ACodec.h | 1
-rw-r--r--  include/media/stagefright/MediaCodec.h | 1
-rw-r--r--  include/media/stagefright/MediaCodecSource.h | 4
-rw-r--r--  include/media/stagefright/SurfaceMediaSource.h | 2
-rw-r--r--  include/media/stagefright/foundation/ADebug.h | 30
-rw-r--r--  include/media/stagefright/foundation/AStringUtils.h | 36
-rw-r--r--  media/libmedia/AudioSystem.cpp | 76
-rw-r--r--  media/libmedia/AudioTrack.cpp | 59
-rw-r--r--  media/libmedia/AudioTrackShared.cpp | 27
-rw-r--r--  media/libmedia/IAudioPolicyService.cpp | 15
-rw-r--r--  media/libmedia/ICrypto.cpp | 22
-rw-r--r--  media/libmedia/IDrm.cpp | 43
-rw-r--r--  media/libmedia/Visualizer.cpp | 19
-rw-r--r--  media/libmediaplayerservice/Crypto.cpp | 8
-rw-r--r--  media/libmediaplayerservice/Crypto.h | 2
-rw-r--r--  media/libmediaplayerservice/Drm.cpp | 28
-rw-r--r--  media/libmediaplayerservice/Drm.h | 2
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.cpp | 4
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.cpp | 4
-rw-r--r--  media/libmediaplayerservice/nuplayer/GenericSource.cpp | 35
-rw-r--r--  media/libmediaplayerservice/nuplayer/GenericSource.h | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp | 4
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 338
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.h | 18
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp | 119
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h | 17
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp | 85
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h | 13
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 36
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 84
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h | 9
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/StreamingSource.cpp | 4
-rw-r--r--  media/libstagefright/ACodec.cpp | 30
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 2
-rw-r--r--  media/libstagefright/CameraSource.cpp | 100
-rw-r--r--  media/libstagefright/MPEG4Writer.cpp | 5
-rw-r--r--  media/libstagefright/MediaCodec.cpp | 21
-rw-r--r--  media/libstagefright/MediaCodecSource.cpp | 269
-rw-r--r--  media/libstagefright/OMXCodec.cpp | 326
-rw-r--r--  media/libstagefright/OggExtractor.cpp | 75
-rw-r--r--  media/libstagefright/SurfaceMediaSource.cpp | 2
-rw-r--r--  media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 31
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp | 4
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 1
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp | 4
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 1
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp | 4
-rw-r--r--  media/libstagefright/foundation/ADebug.cpp | 117
-rw-r--r--  media/libstagefright/foundation/AStringUtils.cpp | 77
-rw-r--r--  media/libstagefright/foundation/Android.mk | 3
-rw-r--r--  media/libstagefright/httplive/LiveSession.cpp | 2
-rw-r--r--  media/libstagefright/httplive/PlaylistFetcher.cpp | 73
-rw-r--r--  media/libstagefright/include/OMXNodeInstance.h | 18
-rw-r--r--  media/libstagefright/matroska/MatroskaExtractor.cpp | 9
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.cpp | 65
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.h | 3
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.cpp | 6
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.cpp | 2
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.h | 2
-rw-r--r--  media/libstagefright/omx/OMX.cpp | 2
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp | 570
-rw-r--r--  media/libstagefright/omx/SoftOMXComponent.cpp | 2
-rw-r--r--  media/libstagefright/tests/Utils_test.cpp | 95
-rw-r--r--  media/mtp/MtpDataPacket.cpp | 160
-rw-r--r--  media/mtp/MtpDataPacket.h | 25
-rw-r--r--  media/mtp/MtpDevice.cpp | 32
-rw-r--r--  media/mtp/MtpDevice.h | 2
-rw-r--r--  media/mtp/MtpDeviceInfo.cpp | 33
-rw-r--r--  media/mtp/MtpDeviceInfo.h | 4
-rw-r--r--  media/mtp/MtpObjectInfo.cpp | 42
-rw-r--r--  media/mtp/MtpObjectInfo.h | 2
-rw-r--r--  media/mtp/MtpPacket.cpp | 2
-rw-r--r--  media/mtp/MtpPacket.h | 8
-rw-r--r--  media/mtp/MtpProperty.cpp | 93
-rw-r--r--  media/mtp/MtpProperty.h | 14
-rw-r--r--  media/mtp/MtpRequestPacket.cpp | 20
-rw-r--r--  media/mtp/MtpRequestPacket.h | 4
-rw-r--r--  media/mtp/MtpServer.cpp | 97
-rw-r--r--  media/mtp/MtpStorageInfo.cpp | 20
-rw-r--r--  media/mtp/MtpStorageInfo.h | 2
-rw-r--r--  media/mtp/MtpStringBuffer.cpp | 13
-rw-r--r--  media/mtp/MtpStringBuffer.h | 2
-rw-r--r--  media/ndk/NdkMediaExtractor.cpp | 27
-rw-r--r--  services/audioflinger/AudioFlinger.cpp | 73
-rw-r--r--  services/audioflinger/AudioFlinger.h | 3
-rw-r--r--  services/audioflinger/ServiceUtilities.cpp | 7
-rw-r--r--  services/audioflinger/ServiceUtilities.h | 1
-rw-r--r--  services/audioflinger/Threads.cpp | 6
-rw-r--r--  services/audiopolicy/AudioPolicyEffects.cpp | 77
-rw-r--r--  services/audiopolicy/AudioPolicyInterfaceImpl.cpp | 35
-rw-r--r--  services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp | 35
-rw-r--r--  services/audiopolicy/AudioPolicyManager.cpp | 217
-rw-r--r--  services/audiopolicy/AudioPolicyManager.h | 31
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.cpp | 20
-rw-r--r--  services/camera/libcameraservice/api1/client2/BurstCapture.cpp | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/BurstCapture.h | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.h | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.cpp | 49
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.cpp | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.h | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp | 100
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.h | 28
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.h | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.cpp | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.h | 2
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.cpp | 16
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.h | 4
-rw-r--r--  services/camera/libcameraservice/gui/RingBufferConsumer.cpp | 4
-rw-r--r--  services/camera/libcameraservice/gui/RingBufferConsumer.h | 2
126 files changed, 2912 insertions, 1572 deletions
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 0dda6b6..dce313a 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -206,7 +206,7 @@ status_t VendorTagDescriptor::createFromParcel(const Parcel* parcel,
return res;
}
- size_t sectionCount;
+ size_t sectionCount = 0;
if (tagCount > 0) {
if ((res = parcel->readInt32(reinterpret_cast<int32_t*>(&sectionCount))) != OK) {
ALOGE("%s: could not read section count for.", __FUNCTION__);
diff --git a/cmds/screenrecord/FrameOutput.cpp b/cmds/screenrecord/FrameOutput.cpp
index 03e0062..bef74f5 100644
--- a/cmds/screenrecord/FrameOutput.cpp
+++ b/cmds/screenrecord/FrameOutput.cpp
@@ -206,7 +206,7 @@ void FrameOutput::reduceRgbaToRgb(uint8_t* buf, unsigned int pixelCount) {
}
// Callback; executes on arbitrary thread.
-void FrameOutput::onFrameAvailable() {
+void FrameOutput::onFrameAvailable(const BufferItem& /* item */) {
Mutex::Autolock _l(mMutex);
mFrameAvailable = true;
mEventCond.signal();
diff --git a/cmds/screenrecord/FrameOutput.h b/cmds/screenrecord/FrameOutput.h
index c49ec3b..4c0c3be 100644
--- a/cmds/screenrecord/FrameOutput.h
+++ b/cmds/screenrecord/FrameOutput.h
@@ -62,7 +62,7 @@ private:
}
// (overrides GLConsumer::FrameAvailableListener method)
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// Reduces RGBA to RGB, in place.
static void reduceRgbaToRgb(uint8_t* buf, unsigned int pixelCount);
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 7fef53d..c659170 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -274,7 +274,7 @@ void Overlay::getTimeString_l(nsecs_t monotonicNsec, char* buf, size_t bufLen) {
}
// Callback; executes on arbitrary thread.
-void Overlay::onFrameAvailable() {
+void Overlay::onFrameAvailable(const BufferItem& /* item */) {
ALOGV("Overlay::onFrameAvailable");
Mutex::Autolock _l(mMutex);
mFrameAvailable = true;
diff --git a/cmds/screenrecord/Overlay.h b/cmds/screenrecord/Overlay.h
index b1b5c29..ee3444d 100644
--- a/cmds/screenrecord/Overlay.h
+++ b/cmds/screenrecord/Overlay.h
@@ -78,7 +78,7 @@ private:
const Program& texRender, TextRenderer& textRenderer);
// (overrides GLConsumer::FrameAvailableListener method)
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// (overrides Thread method)
virtual bool threadLoop();
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
index 27df9cd..6139f1f 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
@@ -113,11 +113,21 @@ public:
return android::ERROR_DRM_CANNOT_HANDLE;
}
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ UNUSED(ssid);
+ UNUSED(secureStop);
+ return android::ERROR_DRM_CANNOT_HANDLE;
+ }
+
virtual status_t releaseSecureStops(const Vector<uint8_t>& ssRelease) {
UNUSED(ssRelease);
return android::ERROR_DRM_CANNOT_HANDLE;
}
+ virtual status_t releaseAllSecureStops() {
+ return android::ERROR_DRM_CANNOT_HANDLE;
+ }
+
virtual status_t getPropertyString(
const String8& name, String8& value) const;
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
index 2ea554b..7eac0a1 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
@@ -305,6 +305,24 @@ namespace android {
return OK;
}
+ status_t MockDrmPlugin::getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop)
+ {
+ Mutex::Autolock lock(mLock);
+ ALOGD("MockDrmPlugin::getSecureStop()");
+
+ // Properties used in mock test, set by cts test app returned from mock plugin
+ // byte[] mock-secure-stop -> first secure stop in list
+
+ ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop"));
+ if (index < 0) {
+ ALOGD("Missing 'mock-secure-stop' parameter for mock");
+ return BAD_VALUE;
+ } else {
+ secureStop = mByteArrayProperties.valueAt(index);
+ }
+ return OK;
+ }
+
status_t MockDrmPlugin::getSecureStops(List<Vector<uint8_t> > &secureStops)
{
Mutex::Autolock lock(mLock);
@@ -349,6 +367,13 @@ namespace android {
return OK;
}
+ status_t MockDrmPlugin::releaseAllSecureStops()
+ {
+ Mutex::Autolock lock(mLock);
+ ALOGD("MockDrmPlugin::releaseAllSecureStops()");
+ return OK;
+ }
+
status_t MockDrmPlugin::getPropertyString(String8 const &name, String8 &value) const
{
ALOGD("MockDrmPlugin::getPropertyString(name=%s)", name.string());
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
index 4b63299..d1d8058 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
@@ -88,7 +88,9 @@ namespace android {
status_t unprovisionDevice();
status_t getSecureStops(List<Vector<uint8_t> > &secureStops);
+ status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
+ status_t releaseAllSecureStops();
status_t getPropertyString(String8 const &name, String8 &value ) const;
status_t getPropertyByteArray(String8 const &name,
diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h
index 83a3028..e9b687a 100644
--- a/include/camera/ProCamera.h
+++ b/include/camera/ProCamera.h
@@ -265,7 +265,7 @@ private:
}
protected:
- virtual void onFrameAvailable() {
+ virtual void onFrameAvailable(const BufferItem& /* item */) {
sp<ProCamera> c = mCamera.promote();
if (c.get() != NULL) {
c->onFrameAvailable(mStreamId);
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index 4932d40..583695d 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -133,10 +133,11 @@ public:
*
* Returned value
* *descriptor updated with descriptors of pre processings enabled by default
- * *count number of descriptors returned if returned status is N_ERROR.
+ * *count number of descriptors returned if returned status is NO_ERROR.
* total number of pre processing enabled by default if returned status is
* NO_MEMORY. This happens if the count passed as input is less than the number
- * of descriptors to return
+ * of descriptors to return.
+ * *count is limited to kMaxPreProcessing on return.
*/
static status_t queryDefaultPreProcessing(int audioSession,
effect_descriptor_t *descriptors,
@@ -391,6 +392,10 @@ public:
*/
static status_t guidToString(const effect_uuid_t *guid, char *str, size_t maxLen);
+ // kMaxPreProcessing is a reasonable value for the maximum number of preprocessing effects
+ // that can be applied simultaneously.
+ static const uint32_t kMaxPreProcessing = 10;
+
protected:
bool mEnabled; // enable state
int32_t mSessionId; // audio session ID
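Note on the hunk above: the revised queryDefaultPreProcessing() contract caps *count at the new kMaxPreProcessing constant. A minimal caller sketch, assuming only the AudioEffect API shown in this header (the function name and session id below are hypothetical):

    #include <media/AudioEffect.h>

    using namespace android;

    // Sketch only: descriptors[] is sized to the new kMaxPreProcessing cap.
    status_t countDefaultPreProcessing(int sessionId, uint32_t *numEffects) {
        effect_descriptor_t descriptors[AudioEffect::kMaxPreProcessing];
        uint32_t count = AudioEffect::kMaxPreProcessing;   // capacity passed in
        status_t status = AudioEffect::queryDefaultPreProcessing(
                sessionId, descriptors, &count);
        if (status == NO_ERROR || status == NO_MEMORY) {
            // NO_ERROR: count is the number of descriptors written (<= cap);
            // NO_MEMORY: count is the total enabled by default, above the cap.
            *numEffects = count;
        }
        return status;
    }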
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index f8c0198..f3b7fbb 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -99,8 +99,6 @@ public:
// to be non-zero if status == NO_ERROR
static status_t getOutputSamplingRate(uint32_t* samplingRate,
audio_stream_type_t stream);
- static status_t getOutputSamplingRateForAttr(uint32_t* samplingRate,
- const audio_attributes_t *attr);
static status_t getOutputFrameCount(size_t* frameCount,
audio_stream_type_t stream);
static status_t getOutputLatency(uint32_t* latency,
@@ -116,8 +114,6 @@ public:
static status_t getLatency(audio_io_handle_t output,
uint32_t* latency);
- static bool routedToA2dpOutput(audio_stream_type_t streamType);
-
// return status NO_ERROR implies *buffSize > 0
static status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize);
@@ -377,7 +373,10 @@ private:
friend class AudioFlingerClient;
friend class AudioPolicyServiceClient;
- static Mutex gLock;
+ static Mutex gLock; // protects all members except gAudioPolicyService,
+ // gAudioPolicyServiceClient, and gAudioPortCallback
+ static Mutex gLockAPS; // protects gAudioPolicyService and gAudioPolicyServiceClient
+ static Mutex gLockAPC; // protects gAudioPortCallback
static sp<IAudioFlinger> gAudioFlinger;
static audio_error_callback gAudioErrorCallback;
diff --git a/include/media/ICrypto.h b/include/media/ICrypto.h
index 9dcb8d9..07742ca 100644
--- a/include/media/ICrypto.h
+++ b/include/media/ICrypto.h
@@ -41,6 +41,8 @@ struct ICrypto : public IInterface {
virtual bool requiresSecureDecoderComponent(
const char *mime) const = 0;
+ virtual void notifyResolution(uint32_t width, uint32_t height) = 0;
+
virtual ssize_t decrypt(
bool secure,
const uint8_t key[16],
@@ -64,4 +66,3 @@ struct BnCrypto : public BnInterface<ICrypto> {
} // namespace android
#endif // ANDROID_ICRYPTO_H_
-
diff --git a/include/media/IDrm.h b/include/media/IDrm.h
index 68de87a..affcbd7 100644
--- a/include/media/IDrm.h
+++ b/include/media/IDrm.h
@@ -73,8 +73,10 @@ struct IDrm : public IInterface {
virtual status_t unprovisionDevice() = 0;
virtual status_t getSecureStops(List<Vector<uint8_t> > &secureStops) = 0;
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) = 0;
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease) = 0;
+ virtual status_t releaseAllSecureStops() = 0;
virtual status_t getPropertyString(String8 const &name, String8 &value) const = 0;
virtual status_t getPropertyByteArray(String8 const &name,
@@ -137,4 +139,3 @@ private:
} // namespace android
#endif // ANDROID_IDRM_H_
-
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index cf18a45..c412299 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -208,8 +208,15 @@ public:
void sendEvent(int msg, int ext1=0, int ext2=0,
const Parcel *obj=NULL) {
- Mutex::Autolock autoLock(mNotifyLock);
- if (mNotify) mNotify(mCookie, msg, ext1, ext2, obj);
+ notify_callback_f notifyCB;
+ void* cookie;
+ {
+ Mutex::Autolock autoLock(mNotifyLock);
+ notifyCB = mNotify;
+ cookie = mCookie;
+ }
+
+ if (notifyCB) notifyCB(cookie, msg, ext1, ext2, obj);
}
virtual status_t dump(int fd, const Vector<String16> &args) const {
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index d77ddaf..fcccc6d 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -120,6 +120,7 @@ private:
kWhatSetParameters = 'setP',
kWhatSubmitOutputMetaDataBufferIfEOS = 'subm',
kWhatOMXDied = 'OMXd',
+ kWhatReleaseCodecInstance = 'relC',
};
enum {
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index bca78b9..54a4e8b 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -205,6 +205,7 @@ private:
kFlagIsEncoder = 256,
kFlagGatherCodecSpecificData = 512,
kFlagIsAsync = 1024,
+ kFlagIsComponentAllocated = 2048,
};
struct BufferInfo {
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index 3629c8b..0970b2b 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -85,8 +85,6 @@ private:
status_t initEncoder();
void releaseEncoder();
status_t feedEncoderInputBuffers();
- void scheduleDoMoreWork();
- status_t doMoreWork(int32_t numInput, int32_t numOutput);
void suspend();
void resume(int64_t skipFramesBeforeUs = -1ll);
void signalEOS(status_t err = ERROR_END_OF_STREAM);
@@ -108,8 +106,6 @@ private:
bool mDoMoreWorkPending;
sp<AMessage> mEncoderActivityNotify;
sp<IGraphicBufferProducer> mGraphicBufferProducer;
- Vector<sp<ABuffer> > mEncoderInputBuffers;
- Vector<sp<ABuffer> > mEncoderOutputBuffers;
List<MediaBuffer *> mInputBufferQueue;
List<size_t> mAvailEncoderInputIndices;
List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index ffe4f4c..2177c00 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -126,7 +126,7 @@ protected:
// Implementation of the BufferQueue::ConsumerListener interface. These
// calls are used to notify the Surface of asynchronous events in the
// BufferQueue.
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// Used as a hook to BufferQueue::disconnect()
// This is called by the client side when it is done
diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h
index 450dcfe..1d0e2cb 100644
--- a/include/media/stagefright/foundation/ADebug.h
+++ b/include/media/stagefright/foundation/ADebug.h
@@ -80,6 +80,36 @@ MAKE_COMPARATOR(GT,>)
__FILE__ ":" LITERAL_TO_STRING(__LINE__) \
" Should not be here.");
+struct ADebug {
+ enum Level {
+ kDebugNone, // no debug
+ kDebugLifeCycle, // lifecycle events: creation/deletion
+ kDebugState, // commands and events
+ kDebugConfig, // configuration
+ kDebugInternalState, // internal state changes
+ kDebugAll, // all
+ kDebugMax = kDebugAll,
+
+ };
+
+ // parse the property or string to get the debug level for a component name
+ // string format is:
+ // <level>[:<glob>][,<level>[:<glob>]...]
+ // - <level> is 0-5 corresponding to ADebug::Level
+ // - <glob> is used to match component name case insensitively, if omitted, it
+ // matches all components
+ // - string is read left-to-right, and the last matching level is returned, or
+ // the def if no terms matched
+ static Level GetDebugLevelFromProperty(
+ const char *name, const char *propertyName, Level def = kDebugNone);
+ static Level GetDebugLevelFromString(
+ const char *name, const char *value, Level def = kDebugNone);
+
+ // remove redundant segments of a codec name, and return a newly allocated
+ // string suitable for debugging
+ static char *GetDebugName(const char *name);
+};
+
} // namespace android
#endif // A_DEBUG_H_
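Note on the new ADebug struct above: the comment block defines the debug-level string format <level>[:<glob>][,<level>[:<glob>]...]. A hedged sketch of how GetDebugLevelFromString interprets such a string; the component name and value string are invented, only the API and Level enum come from this header:

    #include <media/stagefright/foundation/ADebug.h>

    using namespace android;

    ADebug::Level queryExampleLevel() {
        // "2,3:*aac*" means level 2 (kDebugState) for every component, and
        // level 3 (kDebugConfig) for names matching *aac* case-insensitively.
        // Terms are read left to right and the last matching level wins.
        return ADebug::GetDebugLevelFromString(
                "OMX.google.aac.decoder" /* name */,
                "2,3:*aac*" /* value */,
                ADebug::kDebugNone /* def */);
        // Returns ADebug::kDebugConfig: both terms match, and *aac* is last.
    }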
diff --git a/include/media/stagefright/foundation/AStringUtils.h b/include/media/stagefright/foundation/AStringUtils.h
new file mode 100644
index 0000000..76a7791
--- /dev/null
+++ b/include/media/stagefright/foundation/AStringUtils.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_STRING_UTILS_H_
+#define A_STRING_UTILS_H_
+
+#include <stdlib.h>
+
+namespace android {
+
+struct AStringUtils {
+ // similar to strncmp or strcasecmp, but case sensitivity is parametric
+ static int Compare(const char *a, const char *b, size_t len, bool ignoreCase);
+
+ // matches a string (str) to a glob pattern that supports:
+ // * - matches any number of characters
+ static bool MatchesGlob(
+ const char *glob, size_t globLen, const char *str, size_t strLen, bool ignoreCase);
+};
+
+} // namespace android
+
+#endif // A_STRING_UTILS_H_
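Note on the new header above: a short usage sketch for the glob helper; the glob and component names are invented for illustration, only MatchesGlob and its signature come from this header:

    #include <string.h>

    #include <media/stagefright/foundation/AStringUtils.h>

    using namespace android;

    bool looksLikeAacComponent(const char *componentName) {
        // '*' matches any run of characters; matching is case-insensitive here.
        static const char kGlob[] = "*.aac.*";
        return AStringUtils::MatchesGlob(
                kGlob, strlen(kGlob),
                componentName, strlen(componentName),
                true /* ignoreCase */);
    }

    // looksLikeAacComponent("OMX.google.aac.decoder")  -> true
    // looksLikeAacComponent("OMX.google.h264.decoder") -> false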
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index dda3657..fce4389 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -32,6 +32,8 @@ namespace android {
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
+Mutex AudioSystem::gLockAPS;
+Mutex AudioSystem::gLockAPC;
sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
@@ -70,9 +72,9 @@ const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
}
binder->linkToDeath(gAudioFlingerClient);
gAudioFlinger = interface_cast<IAudioFlinger>(binder);
+ LOG_ALWAYS_FATAL_IF(gAudioFlinger == 0);
gAudioFlinger->registerClient(gAudioFlingerClient);
}
- ALOGE_IF(gAudioFlinger==0, "no AudioFlinger!?");
return gAudioFlinger;
}
@@ -245,19 +247,6 @@ status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream
return getSamplingRate(output, samplingRate);
}
-status_t AudioSystem::getOutputSamplingRateForAttr(uint32_t* samplingRate,
- const audio_attributes_t *attr)
-{
- if (attr == NULL) {
- return BAD_VALUE;
- }
- audio_io_handle_t output = getOutputForAttr(attr);
- if (output == 0) {
- return PERMISSION_DENIED;
- }
- return getSamplingRate(output, samplingRate);
-}
-
status_t AudioSystem::getSamplingRate(audio_io_handle_t output,
uint32_t* samplingRate)
{
@@ -543,22 +532,8 @@ void AudioSystem::setErrorCallback(audio_error_callback cb)
gAudioErrorCallback = cb;
}
-
-bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType)
-{
- switch (streamType) {
- case AUDIO_STREAM_MUSIC:
- case AUDIO_STREAM_VOICE_CALL:
- case AUDIO_STREAM_BLUETOOTH_SCO:
- case AUDIO_STREAM_SYSTEM:
- return true;
- default:
- return false;
- }
-}
-
-
// client singleton for AudioPolicyService binder interface
+// protected by gLockAPS
sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
@@ -566,7 +541,7 @@ sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient
// establish binder interface to AudioPolicy service
const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
{
- gLock.lock();
+ Mutex::Autolock _l(gLockAPS);
if (gAudioPolicyService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
@@ -582,15 +557,10 @@ const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
}
binder->linkToDeath(gAudioPolicyServiceClient);
gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
- gLock.unlock();
- // Registering the client takes the AudioPolicyService lock.
- // Don't hold the AudioSystem lock at the same time.
+ LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
gAudioPolicyService->registerClient(gAudioPolicyServiceClient);
- } else {
- // There exists a benign race condition where gAudioPolicyService
- // is set, but gAudioPolicyServiceClient is not yet registered.
- gLock.unlock();
}
+
return gAudioPolicyService;
}
@@ -856,9 +826,18 @@ status_t AudioSystem::setLowRamDevice(bool isLowRamDevice)
void AudioSystem::clearAudioConfigCache()
{
- Mutex::Autolock _l(gLock);
+ // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
ALOGV("clearAudioConfigCache()");
- gOutputs.clear();
+ {
+ Mutex::Autolock _l(gLock);
+ gOutputs.clear();
+ gAudioFlinger.clear();
+ }
+ {
+ Mutex::Autolock _l(gLockAPS);
+ gAudioPolicyService.clear();
+ }
+ // Do not clear gAudioPortCallback
}
bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
@@ -920,7 +899,7 @@ status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config)
void AudioSystem::setAudioPortCallback(sp<AudioPortCallback> callBack)
{
- Mutex::Autolock _l(gLock);
+ Mutex::Autolock _l(gLockAPC);
gAudioPortCallback = callBack;
}
@@ -952,18 +931,23 @@ audio_mode_t AudioSystem::getPhoneState()
void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
{
- Mutex::Autolock _l(gLock);
- if (gAudioPortCallback != 0) {
- gAudioPortCallback->onServiceDied();
+ {
+ Mutex::Autolock _l(gLockAPC);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onServiceDied();
+ }
+ }
+ {
+ Mutex::Autolock _l(gLockAPS);
+ AudioSystem::gAudioPolicyService.clear();
}
- AudioSystem::gAudioPolicyService.clear();
ALOGW("AudioPolicyService server died!");
}
void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
{
- Mutex::Autolock _l(gLock);
+ Mutex::Autolock _l(gLockAPC);
if (gAudioPortCallback != 0) {
gAudioPortCallback->onAudioPortListUpdate();
}
@@ -971,7 +955,7 @@ void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
{
- Mutex::Autolock _l(gLock);
+ Mutex::Autolock _l(gLockAPC);
if (gAudioPortCallback != 0) {
gAudioPortCallback->onAudioPatchListUpdate();
}
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 5379809..b45a420 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -278,7 +278,9 @@ status_t AudioTrack::set(
}
// handle default values first.
- if (streamType == AUDIO_STREAM_DEFAULT) {
+ // TODO once AudioPolicyManager fully supports audio_attributes_t,
+ // remove stream "text-to-speech" redirect
+ if ((streamType == AUDIO_STREAM_DEFAULT) || (streamType == AUDIO_STREAM_TTS)) {
streamType = AUDIO_STREAM_MUSIC;
}
@@ -302,17 +304,6 @@ status_t AudioTrack::set(
mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);
}
- status_t status;
- if (sampleRate == 0) {
- status = AudioSystem::getOutputSamplingRateForAttr(&sampleRate, &mAttributes);
- if (status != NO_ERROR) {
- ALOGE("Could not get output sample rate for stream type %d; status %d",
- mStreamType, status);
- return status;
- }
- }
- mSampleRate = sampleRate;
-
// these below should probably come from the audioFlinger too...
if (format == AUDIO_FORMAT_DEFAULT) {
format = AUDIO_FORMAT_PCM_16_BIT;
@@ -371,6 +362,12 @@ status_t AudioTrack::set(
// so no need to check for specific PCM formats here
}
+ // sampling rate must be specified for direct outputs
+ if (sampleRate == 0 && (flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
+ return BAD_VALUE;
+ }
+ mSampleRate = sampleRate;
+
// Make copy of input parameter offloadInfo so that in the future:
// (a) createTrack_l doesn't need it as an input parameter
// (b) we can support re-creation of offloaded tracks
@@ -411,7 +408,7 @@ status_t AudioTrack::set(
}
// create the IAudioTrack
- status = createTrack_l();
+ status_t status = createTrack_l();
if (status != NO_ERROR) {
if (mAudioTrackThread != 0) {
@@ -678,15 +675,18 @@ status_t AudioTrack::setSampleRate(uint32_t rate)
return INVALID_OPERATION;
}
+ AutoMutex lock(mLock);
+ if (mOutput == AUDIO_IO_HANDLE_NONE) {
+ return NO_INIT;
+ }
uint32_t afSamplingRate;
- if (AudioSystem::getOutputSamplingRateForAttr(&afSamplingRate, &mAttributes) != NO_ERROR) {
+ if (AudioSystem::getSamplingRate(mOutput, &afSamplingRate) != NO_ERROR) {
return NO_INIT;
}
if (rate == 0 || rate > afSamplingRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) {
return BAD_VALUE;
}
- AutoMutex lock(mLock);
mSampleRate = rate;
mProxy->setSampleRate(rate);
@@ -961,7 +961,9 @@ status_t AudioTrack::createTrack_l()
ALOGE("getSamplingRate(output=%d) status %d", output, status);
goto release;
}
-
+ if (mSampleRate == 0) {
+ mSampleRate = afSampleRate;
+ }
// Client decides whether the track is TIMED (see below), but can only express a preference
// for FAST. Server will perform additional tests.
if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) && !((
@@ -1826,7 +1828,7 @@ status_t AudioTrack::restoreTrack_l(const char *from)
status_t result;
// refresh the audio configuration cache in this process to make sure we get new
- // output parameters in createTrack_l()
+ // output parameters and new IAudioFlinger in createTrack_l()
AudioSystem::clearAudioConfigCache();
if (isOffloadedOrDirect_l()) {
@@ -2124,17 +2126,30 @@ void AudioTrack::setStreamTypeFromAttributes(audio_attributes_t& aa) {
mStreamType = AUDIO_STREAM_BLUETOOTH_SCO;
return;
}
+ // TODO once AudioPolicyManager fully supports audio_attributes_t,
+ // remove stream remap, the flag will be enough
+ if ((aa.flags & AUDIO_FLAG_BEACON) == AUDIO_FLAG_BEACON) {
+ mStreamType = AUDIO_STREAM_TTS;
+ return;
+ }
// usage to stream type mapping
switch (aa.usage) {
- case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+ case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY: {
// TODO once AudioPolicyManager fully supports audio_attributes_t,
- // remove stream change based on phone state
- if (AudioSystem::getPhoneState() == AUDIO_MODE_RINGTONE) {
+ // remove stream change based on stream activity
+ bool active;
+ status_t status = AudioSystem::isStreamActive(AUDIO_STREAM_RING, &active, 0);
+ if (status == NO_ERROR && active == true) {
mStreamType = AUDIO_STREAM_RING;
break;
}
- /// FALL THROUGH
+ status = AudioSystem::isStreamActive(AUDIO_STREAM_ALARM, &active, 0);
+ if (status == NO_ERROR && active == true) {
+ mStreamType = AUDIO_STREAM_ALARM;
+ break;
+ }
+ } /// FALL THROUGH
case AUDIO_USAGE_MEDIA:
case AUDIO_USAGE_GAME:
case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
@@ -2174,7 +2189,7 @@ void AudioTrack::setStreamTypeFromAttributes(audio_attributes_t& aa) {
bool AudioTrack::isValidAttributes(const audio_attributes_t *paa) {
// has flags that map to a strategy?
- if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO)) != 0) {
+ if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO | AUDIO_FLAG_BEACON)) != 0) {
return true;
}
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index eec025e..561cb24 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -348,7 +348,13 @@ size_t ClientProxy::getFramesFilled() {
void AudioTrackClientProxy::flush()
{
- mCblk->u.mStreaming.mFlush++;
+ // This works for mFrameCountP2 <= 2^30
+ size_t increment = mFrameCountP2 << 1;
+ size_t mask = increment - 1;
+ audio_track_cblk_t* cblk = mCblk;
+ int32_t newFlush = (cblk->u.mStreaming.mRear & mask) |
+ ((cblk->u.mStreaming.mFlush & ~mask) + increment);
+ android_atomic_release_store(newFlush, &cblk->u.mStreaming.mFlush);
}
bool AudioTrackClientProxy::clearStreamEndDone() {
@@ -536,17 +542,27 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear);
front = cblk->u.mStreaming.mFront;
if (flush != mFlush) {
- mFlush = flush;
// effectively obtain then release whatever is in the buffer
- android_atomic_release_store(rear, &cblk->u.mStreaming.mFront);
- if (front != rear) {
+ size_t mask = (mFrameCountP2 << 1) - 1;
+ int32_t newFront = (front & ~mask) | (flush & mask);
+ ssize_t filled = rear - newFront;
+ // Rather than shutting down on a corrupt flush, just treat it as a full flush
+ if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
+ ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, filled %d=%#x",
+ mFlush, flush, front, rear, mask, newFront, filled, filled);
+ newFront = rear;
+ }
+ mFlush = flush;
+ android_atomic_release_store(newFront, &cblk->u.mStreaming.mFront);
+ // There is no danger from a false positive, so err on the side of caution
+ if (true /*front != newFront*/) {
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
(void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
}
}
- front = rear;
+ front = newFront;
}
} else {
front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront);
@@ -668,6 +684,7 @@ size_t AudioTrackServerProxy::framesReady()
int32_t flush = cblk->u.mStreaming.mFlush;
if (flush != mFlush) {
+ // FIXME should return an accurate value, but over-estimate is better than under-estimate
return mFrameCount;
}
// the acquire might not be necessary since not doing a subsequent read
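Note on the flush rework above: the client packs its rear index into the low bits of mFlush and bumps a generation count in the high bits, so the server can both locate the flush point and still detect back-to-back flushes at the same position. A worked example with deliberately tiny, hypothetical numbers (real frame counts are much larger):

    // Assumed values for illustration only.
    const int32_t frameCountP2 = 8;                   // mFrameCountP2
    const int32_t increment    = frameCountP2 << 1;   // 16
    const int32_t mask         = increment - 1;       // 0x0f

    int32_t rear     = 21;   // client rear index
    int32_t oldFlush = 35;   // previous mFlush value

    // Client side (AudioTrackClientProxy::flush):
    int32_t newFlush = (rear & mask) | ((oldFlush & ~mask) + increment);
    // = (21 & 15) | ((35 & ~15) + 16) = 5 | 48 = 53

    // Server side (ServerProxy::obtainBuffer) recovers the flush point:
    int32_t front    = 20;   // server front index
    int32_t newFront = (front & ~mask) | (newFlush & mask);
    // = (20 & ~15) | (53 & 15) = 16 | 5 = 21, so rear - newFront = 0 frames remain

    // Because the high bits advanced (32 -> 48), newFlush differs from oldFlush
    // even when a later flush repeats the same low-bit rear position.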
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 99fd525..9349662 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -23,6 +23,7 @@
#include <binder/Parcel.h>
+#include <media/AudioEffect.h>
#include <media/IAudioPolicyService.h>
#include <system/audio.h>
@@ -704,8 +705,8 @@ status_t BnAudioPolicyService::onTransact(
case GET_OUTPUT_FOR_ATTR: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_attributes_t *attr = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
- data.read(attr, sizeof(audio_attributes_t));
+ audio_attributes_t attr;
+ data.read(&attr, sizeof(audio_attributes_t));
uint32_t samplingRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
@@ -716,7 +717,7 @@ status_t BnAudioPolicyService::onTransact(
if (hasOffloadInfo) {
data.read(&offloadInfo, sizeof(audio_offload_info_t));
}
- audio_io_handle_t output = getOutputForAttr(attr,
+ audio_io_handle_t output = getOutputForAttr(&attr,
samplingRate,
format,
channelMask,
@@ -916,16 +917,18 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
int audioSession = data.readInt32();
uint32_t count = data.readInt32();
+ if (count > AudioEffect::kMaxPreProcessing) {
+ count = AudioEffect::kMaxPreProcessing;
+ }
uint32_t retCount = count;
- effect_descriptor_t *descriptors =
- (effect_descriptor_t *)new char[count * sizeof(effect_descriptor_t)];
+ effect_descriptor_t *descriptors = new effect_descriptor_t[count];
status_t status = queryDefaultPreProcessing(audioSession, descriptors, &retCount);
reply->writeInt32(status);
if (status != NO_ERROR && status != NO_MEMORY) {
retCount = 0;
}
reply->writeInt32(retCount);
- if (retCount) {
+ if (retCount != 0) {
if (retCount < count) {
count = retCount;
}
diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp
index 0d5f990..c26c5bf 100644
--- a/media/libmedia/ICrypto.cpp
+++ b/media/libmedia/ICrypto.cpp
@@ -33,6 +33,7 @@ enum {
DESTROY_PLUGIN,
REQUIRES_SECURE_COMPONENT,
DECRYPT,
+ NOTIFY_RESOLUTION,
};
struct BpCrypto : public BpInterface<ICrypto> {
@@ -149,6 +150,15 @@ struct BpCrypto : public BpInterface<ICrypto> {
return result;
}
+ virtual void notifyResolution(
+ uint32_t width, uint32_t height) {
+ Parcel data, reply;
+ data.writeInterfaceToken(ICrypto::getInterfaceDescriptor());
+ data.writeInt32(width);
+ data.writeInt32(height);
+ remote()->transact(NOTIFY_RESOLUTION, data, &reply);
+ }
+
private:
DISALLOW_EVIL_CONSTRUCTORS(BpCrypto);
};
@@ -290,10 +300,20 @@ status_t BnCrypto::onTransact(
return OK;
}
+ case NOTIFY_RESOLUTION:
+ {
+ CHECK_INTERFACE(ICrypto, data, reply);
+
+ int32_t width = data.readInt32();
+ int32_t height = data.readInt32();
+ notifyResolution(width, height);
+
+ return OK;
+ }
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
} // namespace android
-
diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp
index 0efdce8..b08fa82 100644
--- a/media/libmedia/IDrm.cpp
+++ b/media/libmedia/IDrm.cpp
@@ -54,7 +54,9 @@ enum {
SIGN_RSA,
VERIFY,
SET_LISTENER,
- UNPROVISION_DEVICE
+ UNPROVISION_DEVICE,
+ GET_SECURE_STOP,
+ RELEASE_ALL_SECURE_STOPS
};
struct BpDrm : public BpInterface<IDrm> {
@@ -255,6 +257,17 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ writeVector(data, ssid);
+ remote()->transact(GET_SECURE_STOP, data, &reply);
+
+ readVector(reply, secureStop);
+ return reply.readInt32();
+ }
+
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -265,6 +278,15 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t releaseAllSecureStops() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ remote()->transact(RELEASE_ALL_SECURE_STOPS, data, &reply);
+
+ return reply.readInt32();
+ }
+
virtual status_t getPropertyString(String8 const &name, String8 &value) const {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -655,6 +677,17 @@ status_t BnDrm::onTransact(
return OK;
}
+ case GET_SECURE_STOP:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ Vector<uint8_t> ssid, secureStop;
+ readVector(data, ssid);
+ status_t result = getSecureStop(ssid, secureStop);
+ writeVector(reply, secureStop);
+ reply->writeInt32(result);
+ return OK;
+ }
+
case RELEASE_SECURE_STOPS:
{
CHECK_INTERFACE(IDrm, data, reply);
@@ -664,6 +697,13 @@ status_t BnDrm::onTransact(
return OK;
}
+ case RELEASE_ALL_SECURE_STOPS:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ reply->writeInt32(releaseAllSecureStops());
+ return OK;
+ }
+
case GET_PROPERTY_STRING:
{
CHECK_INTERFACE(IDrm, data, reply);
@@ -809,4 +849,3 @@ status_t BnDrm::onTransact(
}
} // namespace android
-
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index c146b8d..f91e3e4 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -52,6 +52,13 @@ Visualizer::Visualizer (int32_t priority,
Visualizer::~Visualizer()
{
+ ALOGV("Visualizer::~Visualizer()");
+ if (mCaptureThread != NULL) {
+ mCaptureThread->requestExitAndWait();
+ mCaptureThread.clear();
+ }
+ mCaptureCallBack = NULL;
+ mCaptureFlags = 0;
}
status_t Visualizer::setEnabled(bool enabled)
@@ -102,20 +109,18 @@ status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t
return INVALID_OPERATION;
}
- sp<CaptureThread> t = mCaptureThread;
- if (t != 0) {
- t->mLock.lock();
+ if (mCaptureThread != 0) {
+ mCaptureLock.unlock();
+ mCaptureThread->requestExitAndWait();
+ mCaptureLock.lock();
}
+
mCaptureThread.clear();
mCaptureCallBack = cbk;
mCaptureCbkUser = user;
mCaptureFlags = flags;
mCaptureRate = rate;
- if (t != 0) {
- t->mLock.unlock();
- }
-
if (cbk != NULL) {
mCaptureThread = new CaptureThread(*this, rate, ((flags & CAPTURE_CALL_JAVA) != 0));
}
diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp
index 62593b2..8ee7c0b 100644
--- a/media/libmediaplayerservice/Crypto.cpp
+++ b/media/libmediaplayerservice/Crypto.cpp
@@ -257,4 +257,12 @@ ssize_t Crypto::decrypt(
errorDetailMsg);
}
+void Crypto::notifyResolution(uint32_t width, uint32_t height) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck == OK && mPlugin != NULL) {
+ mPlugin->notifyResolution(width, height);
+ }
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/Crypto.h b/media/libmediaplayerservice/Crypto.h
index c44ae34..0037c2e 100644
--- a/media/libmediaplayerservice/Crypto.h
+++ b/media/libmediaplayerservice/Crypto.h
@@ -45,6 +45,8 @@ struct Crypto : public BnCrypto {
virtual bool requiresSecureDecoderComponent(
const char *mime) const;
+ virtual void notifyResolution(uint32_t width, uint32_t height);
+
virtual ssize_t decrypt(
bool secure,
const uint8_t key[16],
diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp
index 89d689d..0e59e4b 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/media/libmediaplayerservice/Drm.cpp
@@ -449,6 +449,20 @@ status_t Drm::getSecureStops(List<Vector<uint8_t> > &secureStops) {
return mPlugin->getSecureStops(secureStops);
}
+status_t Drm::getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ return mPlugin->getSecureStop(ssid, secureStop);
+}
+
status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
Mutex::Autolock autoLock(mLock);
@@ -463,6 +477,20 @@ status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
return mPlugin->releaseSecureStops(ssRelease);
}
+status_t Drm::releaseAllSecureStops() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ return mPlugin->releaseAllSecureStops();
+}
+
status_t Drm::getPropertyString(String8 const &name, String8 &value ) const {
Mutex::Autolock autoLock(mLock);
diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h
index 9e23e2e..0e1eb2c 100644
--- a/media/libmediaplayerservice/Drm.h
+++ b/media/libmediaplayerservice/Drm.h
@@ -78,8 +78,10 @@ struct Drm : public BnDrm,
virtual status_t unprovisionDevice();
virtual status_t getSecureStops(List<Vector<uint8_t> > &secureStops);
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
+ virtual status_t releaseAllSecureStops();
virtual status_t getPropertyString(String8 const &name, String8 &value ) const;
virtual status_t getPropertyByteArray(String8 const &name,
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 7bb154e..cd5478a 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -2158,7 +2158,6 @@ void MediaPlayerService::AudioCache::notify(
{
case MEDIA_ERROR:
ALOGE("Error %d, %d occurred", ext1, ext2);
- p->mError = ext1;
break;
case MEDIA_PREPARED:
ALOGV("prepared");
@@ -2173,6 +2172,9 @@ void MediaPlayerService::AudioCache::notify(
// wake up thread
Mutex::Autolock lock(p->mLock);
+ if (msg == MEDIA_ERROR) {
+ p->mError = ext1;
+ }
p->mCommandComplete = true;
p->mSignal.signal();
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index cadd691..3d093fa 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -111,7 +111,7 @@ sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const
status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
ALOGV("setAudioSource: %d", as);
if (as < AUDIO_SOURCE_DEFAULT ||
- as >= AUDIO_SOURCE_CNT) {
+ (as >= AUDIO_SOURCE_CNT && as != AUDIO_SOURCE_FM_TUNER)) {
ALOGE("Invalid audio source: %d", as);
return BAD_VALUE;
}
@@ -981,7 +981,7 @@ status_t StagefrightRecorder::setupAMRRecording() {
}
status_t StagefrightRecorder::setupRawAudioRecording() {
- if (mAudioSource >= AUDIO_SOURCE_CNT) {
+ if (mAudioSource >= AUDIO_SOURCE_CNT && mAudioSource != AUDIO_SOURCE_FM_TUNER) {
ALOGE("Invalid audio source: %d", mAudioSource);
return BAD_VALUE;
}
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 6859a1a..d446cec 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -45,6 +45,10 @@ NuPlayer::GenericSource::GenericSource(
bool uidValid,
uid_t uid)
: Source(notify),
+ mAudioTimeUs(0),
+ mAudioLastDequeueTimeUs(0),
+ mVideoTimeUs(0),
+ mVideoLastDequeueTimeUs(0),
mFetchSubtitleDataGeneration(0),
mFetchTimedTextDataGeneration(0),
mDurationUs(0ll),
@@ -62,8 +66,6 @@ NuPlayer::GenericSource::GenericSource(
}
void NuPlayer::GenericSource::resetDataSource() {
- mAudioTimeUs = 0;
- mVideoTimeUs = 0;
mHTTPService.clear();
mHttpSource.clear();
mUri.clear();
@@ -644,17 +646,13 @@ void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
track->mSource->start();
track->mIndex = trackIndex;
- status_t avail;
- if (!track->mPackets->hasBufferAvailable(&avail)) {
- // sync from other source
- TRESPASS();
- break;
- }
-
int64_t timeUs, actualTimeUs;
const bool formatChange = true;
- sp<AMessage> latestMeta = track->mPackets->getLatestEnqueuedMeta();
- CHECK(latestMeta != NULL && latestMeta->findInt64("timeUs", &timeUs));
+ if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ timeUs = mAudioLastDequeueTimeUs;
+ } else {
+ timeUs = mVideoLastDequeueTimeUs;
+ }
readBuffer(trackType, timeUs, &actualTimeUs, formatChange);
readBuffer(counterpartType, -1, NULL, formatChange);
ALOGV("timeUs %lld actualTimeUs %lld", timeUs, actualTimeUs);
@@ -866,6 +864,11 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(
int64_t timeUs;
status_t eosResult; // ignored
CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+ if (audio) {
+ mAudioLastDequeueTimeUs = timeUs;
+ } else {
+ mVideoLastDequeueTimeUs = timeUs;
+ }
if (mSubtitleTrack.mSource != NULL
&& !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
@@ -1132,10 +1135,12 @@ status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, &actualTimeUs);
seekTimeUs = actualTimeUs;
+ mVideoLastDequeueTimeUs = seekTimeUs;
}
if (mAudioTrack.mSource != NULL) {
readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs);
+ mAudioLastDequeueTimeUs = seekTimeUs;
}
setDrmPlaybackStatusIfNeeded(Playback::START, seekTimeUs / 1000);
@@ -1311,11 +1316,9 @@ void NuPlayer::GenericSource::readBuffer(
if ((seeking || formatChange)
&& (trackType == MEDIA_TRACK_TYPE_AUDIO
|| trackType == MEDIA_TRACK_TYPE_VIDEO)) {
- ATSParser::DiscontinuityType type = formatChange
- ? (seeking
- ? ATSParser::DISCONTINUITY_FORMATCHANGE
- : ATSParser::DISCONTINUITY_NONE)
- : ATSParser::DISCONTINUITY_SEEK;
+ ATSParser::DiscontinuityType type = (formatChange && seeking)
+ ? ATSParser::DISCONTINUITY_FORMATCHANGE
+ : ATSParser::DISCONTINUITY_NONE;
track->mPackets->queueDiscontinuity( type, NULL, true /* discard */);
}
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index f8601ea..7a03df0 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -106,8 +106,10 @@ private:
Track mAudioTrack;
int64_t mAudioTimeUs;
+ int64_t mAudioLastDequeueTimeUs;
Track mVideoTrack;
int64_t mVideoTimeUs;
+ int64_t mVideoLastDequeueTimeUs;
Track mSubtitleTrack;
Track mTimedTextTrack;
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index a003c81..02e9caf 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -98,6 +98,10 @@ void NuPlayer::HTTPLiveSource::start() {
}
sp<AMessage> NuPlayer::HTTPLiveSource::getFormat(bool audio) {
+ if (mLiveSession == NULL) {
+ return NULL;
+ }
+
sp<AMessage> format;
status_t err = mLiveSession->getStreamFormat(
audio ? LiveSession::STREAMTYPE_AUDIO
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index a63a940..4f88f02 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -95,21 +95,21 @@ private:
DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
};
-struct NuPlayer::ShutdownDecoderAction : public Action {
- ShutdownDecoderAction(bool audio, bool video)
+struct NuPlayer::FlushDecoderAction : public Action {
+ FlushDecoderAction(FlushCommand audio, FlushCommand video)
: mAudio(audio),
mVideo(video) {
}
virtual void execute(NuPlayer *player) {
- player->performDecoderShutdown(mAudio, mVideo);
+ player->performDecoderFlush(mAudio, mVideo);
}
private:
- bool mAudio;
- bool mVideo;
+ FlushCommand mAudio;
+ FlushCommand mVideo;
- DISALLOW_EVIL_CONSTRUCTORS(ShutdownDecoderAction);
+ DISALLOW_EVIL_CONSTRUCTORS(FlushDecoderAction);
};
struct NuPlayer::PostMessageAction : public Action {
@@ -165,8 +165,6 @@ NuPlayer::NuPlayer()
mTimeDiscontinuityPending(false),
mFlushingAudio(NONE),
mFlushingVideo(NONE),
- mSkipRenderingAudioUntilMediaTimeUs(-1ll),
- mSkipRenderingVideoUntilMediaTimeUs(-1ll),
mNumFramesTotal(0ll),
mNumFramesDropped(0ll),
mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
@@ -306,10 +304,6 @@ void NuPlayer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
-void NuPlayer::resume() {
- (new AMessage(kWhatResume, id()))->post();
-}
-
void NuPlayer::resetAsync() {
if (mSource != NULL) {
// During a reset, the data source might be unresponsive already, we need to
@@ -526,19 +520,24 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
{
ALOGV("kWhatSetVideoNativeWindow");
- mDeferredActions.push_back(
- new ShutdownDecoderAction(
- false /* audio */, true /* video */));
-
sp<RefBase> obj;
CHECK(msg->findObject("native-window", &obj));
+ if (mSource->getFormat(false /* audio */) == NULL) {
+ performSetSurface(static_cast<NativeWindowWrapper *>(obj.get()));
+ break;
+ }
+
+ mDeferredActions.push_back(
+ new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
+
mDeferredActions.push_back(
new SetSurfaceAction(
static_cast<NativeWindowWrapper *>(obj.get())));
if (obj != NULL) {
- if (mStarted && mSource->getFormat(false /* audio */) != NULL) {
+ if (mStarted) {
// Issue a seek to refresh the video screen only if started otherwise
// the extractor may not yet be started and will assert.
// If the video decoder is not set (perhaps audio only in this case)
@@ -574,69 +573,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatStart:
{
ALOGV("kWhatStart");
-
- mVideoIsAVC = false;
- mOffloadAudio = false;
- mAudioEOS = false;
- mVideoEOS = false;
- mSkipRenderingAudioUntilMediaTimeUs = -1;
- mSkipRenderingVideoUntilMediaTimeUs = -1;
- mNumFramesTotal = 0;
- mNumFramesDropped = 0;
- mStarted = true;
-
- /* instantiate decoders now for secure playback */
- if (mSourceFlags & Source::FLAG_SECURE) {
- if (mNativeWindow != NULL) {
- instantiateDecoder(false, &mVideoDecoder);
- }
-
- if (mAudioSink != NULL) {
- instantiateDecoder(true, &mAudioDecoder);
- }
- }
-
- mSource->start();
-
- uint32_t flags = 0;
-
- if (mSource->isRealTime()) {
- flags |= Renderer::FLAG_REAL_TIME;
- }
-
- sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
- audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
- if (mAudioSink != NULL) {
- streamType = mAudioSink->getAudioStreamType();
- }
-
- sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
-
- mOffloadAudio =
- canOffloadStream(audioMeta, (videoFormat != NULL),
- true /* is_streaming */, streamType);
- if (mOffloadAudio) {
- flags |= Renderer::FLAG_OFFLOAD_AUDIO;
- }
-
- sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
- ++mRendererGeneration;
- notify->setInt32("generation", mRendererGeneration);
- mRenderer = new Renderer(mAudioSink, notify, flags);
-
- mRendererLooper = new ALooper;
- mRendererLooper->setName("NuPlayerRenderer");
- mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
- mRendererLooper->registerHandler(mRenderer);
-
- sp<MetaData> meta = getFileMeta();
- int32_t rate;
- if (meta != NULL
- && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) {
- mRenderer->setVideoFrameRate(rate);
+ if (mStarted) {
+ onResume();
+ } else {
+ onStart();
}
-
- postScanSources();
break;
}
@@ -758,19 +699,14 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
handleFlushComplete(audio, true /* isDecoder */);
finishFlushIfPossible();
- } else if (what == Decoder::kWhatOutputFormatChanged) {
+ } else if (what == Decoder::kWhatVideoSizeChanged) {
sp<AMessage> format;
CHECK(msg->findMessage("format", &format));
- if (audio) {
- openAudioSink(format, false /*offloadOnly*/);
- } else {
- // video
- sp<AMessage> inputFormat =
- mSource->getFormat(false /* audio */);
+ sp<AMessage> inputFormat =
+ mSource->getFormat(false /* audio */);
- updateVideoSize(inputFormat, format);
- }
+ updateVideoSize(inputFormat, format);
} else if (what == Decoder::kWhatShutdownCompleted) {
ALOGV("%s shutdown completed", audio ? "audio" : "video");
if (audio) {
@@ -811,7 +747,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
switch (*flushing) {
case NONE:
mDeferredActions.push_back(
- new ShutdownDecoderAction(audio, !audio /* video */));
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
processDeferredActions();
break;
case FLUSHING_DECODER:
@@ -834,7 +772,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
break; // Finish anyways.
}
notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
- } else if (what == Decoder::kWhatDrainThisBuffer) {
+ } else if (what == Decoder::kWhatRenderBufferTime) {
renderBuffer(audio, msg);
} else {
ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
@@ -934,8 +872,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("kWhatReset");
mDeferredActions.push_back(
- new ShutdownDecoderAction(
- true /* audio */, true /* video */));
+ new FlushDecoderAction(
+ FLUSH_CMD_SHUTDOWN /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performReset));
@@ -955,7 +894,8 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
seekTimeUs, needNotify);
mDeferredActions.push_back(
- new SimpleAction(&NuPlayer::performDecoderFlush));
+ new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+ FLUSH_CMD_FLUSH /* video */));
mDeferredActions.push_back(
new SeekAction(seekTimeUs, needNotify));
@@ -979,26 +919,6 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatResume:
- {
- if (mSource != NULL) {
- mSource->resume();
- } else {
- ALOGW("resume called when source is gone or not set");
- }
- // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
- // needed.
- if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
- instantiateDecoder(true /* audio */, &mAudioDecoder);
- }
- if (mRenderer != NULL) {
- mRenderer->resume();
- } else {
- ALOGW("resume called when renderer is gone or not set");
- }
- break;
- }
-
case kWhatSourceNotify:
{
onSourceNotify(msg);
@@ -1017,6 +937,94 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
}
+void NuPlayer::onResume() {
+ if (mSource != NULL) {
+ mSource->resume();
+ } else {
+ ALOGW("resume called when source is gone or not set");
+ }
+ // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
+ // needed.
+ if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
+ instantiateDecoder(true /* audio */, &mAudioDecoder);
+ }
+ if (mRenderer != NULL) {
+ mRenderer->resume();
+ } else {
+ ALOGW("resume called when renderer is gone or not set");
+ }
+}
+
+void NuPlayer::onStart() {
+ mVideoIsAVC = false;
+ mOffloadAudio = false;
+ mAudioEOS = false;
+ mVideoEOS = false;
+ mNumFramesTotal = 0;
+ mNumFramesDropped = 0;
+ mStarted = true;
+
+ /* instantiate decoders now for secure playback */
+ if (mSourceFlags & Source::FLAG_SECURE) {
+ if (mNativeWindow != NULL) {
+ instantiateDecoder(false, &mVideoDecoder);
+ }
+
+ if (mAudioSink != NULL) {
+ instantiateDecoder(true, &mAudioDecoder);
+ }
+ }
+
+ mSource->start();
+
+ uint32_t flags = 0;
+
+ if (mSource->isRealTime()) {
+ flags |= Renderer::FLAG_REAL_TIME;
+ }
+
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+
+ mOffloadAudio =
+ canOffloadStream(audioMeta, (videoFormat != NULL),
+ true /* is_streaming */, streamType);
+ if (mOffloadAudio) {
+ flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+ }
+
+ sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
+ ++mRendererGeneration;
+ notify->setInt32("generation", mRendererGeneration);
+ mRenderer = new Renderer(mAudioSink, notify, flags);
+
+ mRendererLooper = new ALooper;
+ mRendererLooper->setName("NuPlayerRenderer");
+ mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ mRendererLooper->registerHandler(mRenderer);
+
+ sp<MetaData> meta = getFileMeta();
+ int32_t rate;
+ if (meta != NULL
+ && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) {
+ mRenderer->setVideoFrameRate(rate);
+ }
+
+ if (mVideoDecoder != NULL) {
+ mVideoDecoder->setRenderer(mRenderer);
+ }
+ if (mAudioDecoder != NULL) {
+ mAudioDecoder->setRenderer(mRenderer);
+ }
+
+ postScanSources();
+}
+
bool NuPlayer::audioDecoderStillNeeded() {
// Audio decoder is no longer needed if it's in shut/shutting down status.
return ((mFlushingAudio != SHUT_DOWN) && (mFlushingAudio != SHUTTING_DOWN_DECODER));
@@ -1119,7 +1127,7 @@ void NuPlayer::openAudioSink(const sp<AMessage> &format, bool offloadOnly) {
// Current code will just make that we select deep buffer
// with video which should not be a problem as it should
// not prevent from keeping A/V sync.
- if (hasVideo &&
+ if (!hasVideo &&
mSource->getDuration(&durationUs) == OK &&
durationUs
> AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
@@ -1172,16 +1180,16 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
notify->setInt32("generation", mAudioDecoderGeneration);
if (mOffloadAudio) {
- *decoder = new DecoderPassThrough(notify);
+ *decoder = new DecoderPassThrough(notify, mSource, mRenderer);
} else {
- *decoder = new Decoder(notify);
+ *decoder = new Decoder(notify, mSource, mRenderer);
}
} else {
sp<AMessage> notify = new AMessage(kWhatVideoNotify, id());
++mVideoDecoderGeneration;
notify->setInt32("generation", mVideoDecoderGeneration);
- *decoder = new Decoder(notify, mNativeWindow);
+ *decoder = new Decoder(notify, mSource, mRenderer, mNativeWindow);
}
(*decoder)->init();
(*decoder)->configure(format);
@@ -1270,33 +1278,6 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
ALOGI("%s discontinuity (formatChange=%d, time=%d)",
audio ? "audio" : "video", formatChange, timeChange);
- if (audio) {
- mSkipRenderingAudioUntilMediaTimeUs = -1;
- } else {
- mSkipRenderingVideoUntilMediaTimeUs = -1;
- }
-
- if (timeChange) {
- sp<AMessage> extra;
- if (accessUnit->meta()->findMessage("extra", &extra)
- && extra != NULL) {
- int64_t resumeAtMediaTimeUs;
- if (extra->findInt64(
- "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
- ALOGI("suppressing rendering of %s until %lld us",
- audio ? "audio" : "video", resumeAtMediaTimeUs);
-
- if (audio) {
- mSkipRenderingAudioUntilMediaTimeUs =
- resumeAtMediaTimeUs;
- } else {
- mSkipRenderingVideoUntilMediaTimeUs =
- resumeAtMediaTimeUs;
- }
- }
- }
- }
-
mTimeDiscontinuityPending =
mTimeDiscontinuityPending || timeChange;
@@ -1437,9 +1418,6 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
// ALOGV("renderBuffer %s", audio ? "audio" : "video");
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
-
if ((audio && mFlushingAudio != NONE)
|| (!audio && mFlushingVideo != NONE)) {
// We're currently attempting to flush the decoder, in order
@@ -1450,40 +1428,15 @@ void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
ALOGV("we're still flushing the %s decoder, sending its output buffer"
" right back.", audio ? "audio" : "video");
- reply->post();
return;
}
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
-
int64_t mediaTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
- int64_t &skipUntilMediaTimeUs =
- audio
- ? mSkipRenderingAudioUntilMediaTimeUs
- : mSkipRenderingVideoUntilMediaTimeUs;
-
- if (skipUntilMediaTimeUs >= 0) {
-
- if (mediaTimeUs < skipUntilMediaTimeUs) {
- ALOGV("dropping %s buffer at time %lld as requested.",
- audio ? "audio" : "video",
- mediaTimeUs);
-
- reply->post();
- return;
- }
-
- skipUntilMediaTimeUs = -1;
- }
+ CHECK(msg->findInt64("timeUs", &mediaTimeUs));
if (!audio && mCCDecoder->isSelected()) {
mCCDecoder->display(mediaTimeUs);
}
-
- mRenderer->queueBuffer(audio, buffer, reply);
}
void NuPlayer::updateVideoSize(
@@ -1583,7 +1536,6 @@ void NuPlayer::flushDecoder(
mScanSourcesPending = false;
decoder->signalFlush(newFormat);
- mRenderer->flush(audio);
FlushStatus newStatus =
needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
@@ -1624,7 +1576,9 @@ void NuPlayer::queueDecoderShutdown(
ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
mDeferredActions.push_back(
- new ShutdownDecoderAction(audio, video));
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ video ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE));
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performScanSources));
@@ -1769,40 +1723,22 @@ void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) {
// everything's flushed, continue playback.
}
-void NuPlayer::performDecoderFlush() {
- ALOGV("performDecoderFlush");
-
- if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
- return;
- }
-
- mTimeDiscontinuityPending = true;
-
- if (mAudioDecoder != NULL) {
- flushDecoder(true /* audio */, false /* needShutdown */);
- }
-
- if (mVideoDecoder != NULL) {
- flushDecoder(false /* audio */, false /* needShutdown */);
- }
-}
-
-void NuPlayer::performDecoderShutdown(bool audio, bool video) {
- ALOGV("performDecoderShutdown audio=%d, video=%d", audio, video);
+void NuPlayer::performDecoderFlush(FlushCommand audio, FlushCommand video) {
+ ALOGV("performDecoderFlush audio=%d, video=%d", audio, video);
- if ((!audio || mAudioDecoder == NULL)
- && (!video || mVideoDecoder == NULL)) {
+ if ((audio == FLUSH_CMD_NONE || mAudioDecoder == NULL)
+ && (video == FLUSH_CMD_NONE || mVideoDecoder == NULL)) {
return;
}
mTimeDiscontinuityPending = true;
- if (audio && mAudioDecoder != NULL) {
- flushDecoder(true /* audio */, true /* needShutdown */);
+ if (audio != FLUSH_CMD_NONE && mAudioDecoder != NULL) {
+ flushDecoder(true /* audio */, (audio == FLUSH_CMD_SHUTDOWN));
}
- if (video && mVideoDecoder != NULL) {
- flushDecoder(false /* audio */, true /* needShutdown */);
+ if (video != FLUSH_CMD_NONE && mVideoDecoder != NULL) {
+ flushDecoder(false /* audio */, (video == FLUSH_CMD_SHUTDOWN));
}
}
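The two hunks above collapse ShutdownDecoderAction and the old parameterless performDecoderFlush() into a single FlushDecoderAction that carries one FlushCommand per stream, so a seek can flush both decoders while reset/shutdown paths request FLUSH_CMD_SHUTDOWN selectively. Below is a minimal standalone sketch of how the old bool-based call sites map onto the new command enum; the FlushCommand names come from the diff, everything else is a simplified stand-in rather than the real NuPlayer types.

    #include <cstdio>

    // FlushCommand mirrors the enum added to NuPlayer.h; the rest is a stand-in.
    enum FlushCommand { FLUSH_CMD_NONE, FLUSH_CMD_FLUSH, FLUSH_CMD_SHUTDOWN };

    // Old ShutdownDecoderAction(audio, video) took two bools; each bool maps to
    // "shut that stream down" or "leave it alone".
    static FlushCommand shutdownIf(bool shutdown) {
        return shutdown ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE;
    }

    // Sketch of performDecoderFlush(): act only on streams whose command is not
    // NONE, and request decoder shutdown when the command says so.
    static void performDecoderFlush(FlushCommand audio, FlushCommand video) {
        if (audio != FLUSH_CMD_NONE) {
            std::printf("flush audio decoder, needShutdown=%d\n",
                        audio == FLUSH_CMD_SHUTDOWN);
        }
        if (video != FLUSH_CMD_NONE) {
            std::printf("flush video decoder, needShutdown=%d\n",
                        video == FLUSH_CMD_SHUTDOWN);
        }
    }

    int main() {
        // queueDecoderShutdown(audio=true, video=false) now becomes:
        performDecoderFlush(shutdownIf(true), shutdownIf(false));
        // The seek path flushes both streams without shutting either down:
        performDecoderFlush(FLUSH_CMD_FLUSH, FLUSH_CMD_FLUSH);
        return 0;
    }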
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 24c06c9..7175e15 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -54,7 +54,6 @@ struct NuPlayer : public AHandler {
void start();
void pause();
- void resume();
// Will notify the driver through "notifyResetComplete" once finished.
void resetAsync();
@@ -95,7 +94,7 @@ private:
struct Action;
struct SeekAction;
struct SetSurfaceAction;
- struct ShutdownDecoderAction;
+ struct FlushDecoderAction;
struct PostMessageAction;
struct SimpleAction;
@@ -160,6 +159,12 @@ private:
SHUT_DOWN,
};
+ enum FlushCommand {
+ FLUSH_CMD_NONE,
+ FLUSH_CMD_FLUSH,
+ FLUSH_CMD_SHUTDOWN,
+ };
+
// Once the current flush is complete this indicates whether the
// notion of time has changed.
bool mTimeDiscontinuityPending;
@@ -176,9 +181,6 @@ private:
FlushStatus mFlushingAudio;
FlushStatus mFlushingVideo;
- int64_t mSkipRenderingAudioUntilMediaTimeUs;
- int64_t mSkipRenderingVideoUntilMediaTimeUs;
-
int64_t mNumFramesTotal, mNumFramesDropped;
int32_t mVideoScalingMode;
@@ -213,6 +215,9 @@ private:
void handleFlushComplete(bool audio, bool isDecoder);
void finishFlushIfPossible();
+ void onStart();
+ void onResume();
+
bool audioDecoderStillNeeded();
void flushDecoder(
@@ -227,8 +232,7 @@ private:
void processDeferredActions();
void performSeek(int64_t seekTimeUs, bool needNotify);
- void performDecoderFlush();
- void performDecoderShutdown(bool audio, bool video);
+ void performDecoderFlush(FlushCommand audio, FlushCommand video);
void performReset();
void performScanSources();
void performSetSurface(const sp<NativeWindowWrapper> &wrapper);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 27f6131..e695c43 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -21,6 +21,9 @@
#include "NuPlayerDecoder.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -35,9 +38,14 @@ namespace android {
NuPlayer::Decoder::Decoder(
const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer,
const sp<NativeWindowWrapper> &nativeWindow)
: mNotify(notify),
mNativeWindow(nativeWindow),
+ mSource(source),
+ mRenderer(renderer),
+ mSkipRenderingUntilMediaTimeUs(-1ll),
mBufferGeneration(0),
mPaused(true),
mComponentName("decoder") {
@@ -169,7 +177,9 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
mInputBuffers.size(),
mOutputBuffers.size());
- requestCodecNotification();
+ if (mRenderer != NULL) {
+ requestCodecNotification();
+ }
mPaused = false;
}
@@ -191,6 +201,7 @@ void NuPlayer::Decoder::releaseAndResetMediaBuffers() {
}
mPendingInputMessages.clear();
+ mSkipRenderingUntilMediaTimeUs = -1;
}
void NuPlayer::Decoder::requestCodecNotification() {
@@ -217,6 +228,12 @@ void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
msg->post();
}
+void NuPlayer::Decoder::setRenderer(const sp<Renderer> &renderer) {
+ sp<AMessage> msg = new AMessage(kWhatSetRenderer, id());
+ msg->setObject("renderer", renderer);
+ msg->post();
+}
+
void NuPlayer::Decoder::signalUpdateFormat(const sp<AMessage> &format) {
sp<AMessage> msg = new AMessage(kWhatUpdateFormat, id());
msg->setMessage("format", format);
@@ -342,8 +359,6 @@ bool android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
}
}
-
-
if (buffer == NULL /* includes !hasBuffer */) {
int32_t streamErr = ERROR_END_OF_STREAM;
CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
@@ -375,6 +390,17 @@ bool android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
handleError(streamErr);
}
} else {
+ sp<AMessage> extra;
+ if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+ int64_t resumeAtMediaTimeUs;
+ if (extra->findInt64(
+ "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
+ ALOGI("[%s] suppressing rendering until %lld us",
+ mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+ mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+ }
+ }
+
int64_t timeUs = 0;
uint32_t flags = 0;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
@@ -454,10 +480,27 @@ bool NuPlayer::Decoder::handleAnOutputBuffer() {
return false;
}
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
- notify->setMessage("format", format);
- notify->post();
+ if (isVideo()) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatVideoSizeChanged);
+ notify->setMessage("format", format);
+ notify->post();
+ } else if (mRenderer != NULL) {
+ uint32_t flags;
+ int64_t durationUs;
+ bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+ if (!hasVideo &&
+ mSource->getDuration(&durationUs) == OK &&
+ durationUs
+ > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+ flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ } else {
+ flags = AUDIO_OUTPUT_FLAG_NONE;
+ }
+
+ mRenderer->openAudioSink(
+ format, false /* offloadOnly */, hasVideo, flags);
+ }
return true;
} else if (res == INFO_DISCONTINUITY) {
// nothing to do
@@ -485,21 +528,26 @@ bool NuPlayer::Decoder::handleAnOutputBuffer() {
reply->setSize("buffer-ix", bufferIx);
reply->setInt32("generation", mBufferGeneration);
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatDrainThisBuffer);
- notify->setBuffer("buffer", buffer);
- notify->setMessage("reply", reply);
- notify->post();
+ if (mSkipRenderingUntilMediaTimeUs >= 0) {
+ if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+ ALOGV("[%s] dropping buffer at time %lld as requested.",
+ mComponentName.c_str(), (long long)timeUs);
- // FIXME: This should be handled after rendering is complete,
- // but Renderer needs it now
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- ALOGV("queueing eos [%s]", mComponentName.c_str());
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatEOS);
- notify->setInt32("err", ERROR_END_OF_STREAM);
- notify->post();
+ reply->post();
+ return true;
+ }
+
+ mSkipRenderingUntilMediaTimeUs = -1;
+ }
+
+ if (mRenderer != NULL) {
+ // send the buffer to renderer.
+ mRenderer->queueBuffer(!isVideo(), buffer, reply);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ mRenderer->queueEOS(!isVideo(), ERROR_END_OF_STREAM);
+ }
}
+
return true;
}
@@ -508,6 +556,17 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
int32_t render;
size_t bufferIx;
CHECK(msg->findSize("buffer-ix", &bufferIx));
+
+ if (isVideo()) {
+ int64_t timeUs;
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->meta()->findInt64("timeUs", &timeUs);
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatRenderBufferTime);
+ notify->setInt64("timeUs", timeUs);
+ notify->post();
+ }
+
if (msg->findInt32("render", &render) && render) {
int64_t timestampNs;
CHECK(msg->findInt64("timestampNs", &timestampNs));
@@ -523,6 +582,10 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
}
void NuPlayer::Decoder::onFlush() {
+ if (mRenderer != NULL) {
+ mRenderer->flush(!isVideo());
+ }
+
status_t err = OK;
if (mCodec != NULL) {
err = mCodec->flush();
@@ -594,6 +657,18 @@ void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetRenderer:
+ {
+ bool hadNoRenderer = (mRenderer == NULL);
+ sp<RefBase> obj;
+ CHECK(msg->findObject("renderer", &obj));
+ mRenderer = static_cast<Renderer *>(obj.get());
+ if (hadNoRenderer && mRenderer != NULL) {
+ requestCodecNotification();
+ }
+ break;
+ }
+
case kWhatUpdateFormat:
{
sp<AMessage> format;
@@ -772,6 +847,10 @@ bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetF
return seamless;
}
+bool NuPlayer::Decoder::isVideo() {
+ return mNativeWindow != NULL;
+}
+
struct CCData {
CCData(uint8_t type, uint8_t data1, uint8_t data2)
: mType(type), mData1(data1), mData2(data2) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index dba3eee..c6ceb4e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -30,11 +30,15 @@ struct MediaBuffer;
struct NuPlayer::Decoder : public AHandler {
Decoder(const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer = NULL,
const sp<NativeWindowWrapper> &nativeWindow = NULL);
virtual void configure(const sp<AMessage> &format);
virtual void init();
+ virtual void setRenderer(const sp<Renderer> &renderer);
+
status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
virtual void signalFlush(const sp<AMessage> &format = NULL);
virtual void signalUpdateFormat(const sp<AMessage> &format);
@@ -45,8 +49,8 @@ struct NuPlayer::Decoder : public AHandler {
enum {
kWhatFillThisBuffer = 'flTB',
- kWhatDrainThisBuffer = 'drTB',
- kWhatOutputFormatChanged = 'fmtC',
+ kWhatRenderBufferTime = 'rnBT',
+ kWhatVideoSizeChanged = 'viSC',
kWhatFlushCompleted = 'flsC',
kWhatShutdownCompleted = 'shDC',
kWhatEOS = 'eos ',
@@ -59,10 +63,10 @@ protected:
virtual void onMessageReceived(const sp<AMessage> &msg);
-private:
enum {
kWhatCodecNotify = 'cdcN',
kWhatConfigure = 'conf',
+ kWhatSetRenderer = 'setR',
kWhatGetInputBuffers = 'gInB',
kWhatInputBufferFilled = 'inpF',
kWhatRenderBuffer = 'rndr',
@@ -71,9 +75,13 @@ private:
kWhatUpdateFormat = 'uFmt',
};
+private:
sp<AMessage> mNotify;
sp<NativeWindowWrapper> mNativeWindow;
+ sp<Source> mSource;
+ sp<Renderer> mRenderer;
+
sp<AMessage> mInputFormat;
sp<AMessage> mOutputFormat;
sp<MediaCodec> mCodec;
@@ -89,6 +97,8 @@ private:
Vector<bool> mInputBufferIsDequeued;
Vector<MediaBuffer *> mMediaBuffers;
+ int64_t mSkipRenderingUntilMediaTimeUs;
+
void handleError(int32_t err);
bool handleAnInputBuffer();
bool handleAnOutputBuffer();
@@ -110,6 +120,7 @@ private:
bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
void rememberCodecSpecificData(const sp<AMessage> &format);
+ bool isVideo();
DISALLOW_EVIL_CONSTRUCTORS(Decoder);
};
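With mSkipRenderingAudioUntilMediaTimeUs/mSkipRenderingVideoUntilMediaTimeUs removed from NuPlayer, each Decoder now picks up the resume-at media-time hint from the input buffer's "extra" message and drops its own output until that time, then queues directly to the Renderer it was handed. A small self-contained sketch of that guard follows, with stand-in types and printf in place of the reply/queueBuffer plumbing.

    #include <cstdint>
    #include <cstdio>

    // Standalone model of the per-decoder guard; the real code queues the buffer
    // to the Renderer (or posts the reply) instead of printing.
    struct SkipUntilGuard {
        int64_t mSkipRenderingUntilMediaTimeUs = -1;

        // Input side: a buffer after a time discontinuity carries a
        // resume-at media-time hint in its "extra" message.
        void onResumeAtHint(int64_t resumeAtMediaTimeUs) {
            mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
        }

        // Output side: returns true if this output buffer should be dropped.
        bool shouldDrop(int64_t timeUs) {
            if (mSkipRenderingUntilMediaTimeUs < 0) {
                return false;
            }
            if (timeUs < mSkipRenderingUntilMediaTimeUs) {
                std::printf("dropping buffer at time %lld us as requested\n",
                            (long long)timeUs);
                return true;
            }
            mSkipRenderingUntilMediaTimeUs = -1;  // reached the resume point
            return false;
        }
    };

    int main() {
        SkipUntilGuard guard;
        guard.onResumeAtHint(2000000);                    // resume at 2.0s
        std::printf("%d\n", guard.shouldDrop(1500000));   // 1 -> dropped
        std::printf("%d\n", guard.shouldDrop(2100000));   // 0 -> rendered, guard cleared
        return 0;
    }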
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
index f7aacdd..d2721ed 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -21,6 +21,9 @@
#include "NuPlayerDecoderPassThrough.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -36,15 +39,21 @@ static const size_t kMaxCachedBytes = 200000;
static const size_t kMaxPendingBuffers = 1 + (kMaxCachedBytes / NuPlayer::kAggregateBufferSizeBytes);
NuPlayer::DecoderPassThrough::DecoderPassThrough(
- const sp<AMessage> &notify)
- : Decoder(notify),
+ const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer)
+ : Decoder(notify, source),
mNotify(notify),
+ mSource(source),
+ mRenderer(renderer),
+ mSkipRenderingUntilMediaTimeUs(-1ll),
mBufferGeneration(0),
mReachedEOS(true),
mPendingBuffersToFill(0),
mPendingBuffersToDrain(0),
mCachedBytes(0),
mComponentName("pass through decoder") {
+ ALOGW_IF(renderer == NULL, "expect a non-NULL renderer");
mDecoderLooper = new ALooper;
mDecoderLooper->setName("NuPlayerDecoderPassThrough");
mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
@@ -90,10 +99,17 @@ void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
requestMaxBuffers();
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
- notify->setMessage("format", format);
- notify->post();
+ uint32_t flags;
+ int64_t durationUs;
+ if (mSource->getDuration(&durationUs) == OK &&
+ durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+ flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ } else {
+ flags = AUDIO_OUTPUT_FLAG_NONE;
+ }
+
+ mRenderer->openAudioSink(
+ format, true /* offloadOnly */, false /* hasVideo */, flags);
}
bool NuPlayer::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
@@ -138,25 +154,52 @@ void android::NuPlayer::DecoderPassThrough::onInputBufferFilled(
msg->findBuffer("buffer", &buffer);
if (buffer == NULL) {
mReachedEOS = true;
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatEOS);
- notify->setInt32("err", ERROR_END_OF_STREAM);
- notify->post();
+ if (mRenderer != NULL) {
+ mRenderer->queueEOS(true /* audio */, ERROR_END_OF_STREAM);
+ }
return;
}
- mCachedBytes += buffer->size();
+ sp<AMessage> extra;
+ if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+ int64_t resumeAtMediaTimeUs;
+ if (extra->findInt64(
+ "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
+ ALOGI("[%s] suppressing rendering until %lld us",
+ mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+ mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+ }
+ }
+
+ int32_t bufferSize = buffer->size();
+ mCachedBytes += bufferSize;
+
+ if (mSkipRenderingUntilMediaTimeUs >= 0) {
+ int64_t timeUs = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+ ALOGV("[%s] dropping buffer at time %lld as requested.",
+ mComponentName.c_str(), (long long)timeUs);
+
+ onBufferConsumed(bufferSize);
+ return;
+ }
+
+ mSkipRenderingUntilMediaTimeUs = -1;
+ }
+
+ if (mRenderer == NULL) {
+ onBufferConsumed(bufferSize);
+ return;
+ }
sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id());
reply->setInt32("generation", mBufferGeneration);
- reply->setInt32("size", buffer->size());
+ reply->setInt32("size", bufferSize);
+
+ mRenderer->queueBuffer(true /* audio */, buffer, reply);
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatDrainThisBuffer);
- notify->setBuffer("buffer", buffer);
- notify->setMessage("reply", reply);
- notify->post();
++mPendingBuffersToDrain;
ALOGV("onInputBufferFilled: #ToFill = %zu, #ToDrain = %zu, cachedBytes = %zu",
mPendingBuffersToFill, mPendingBuffersToDrain, mCachedBytes);
@@ -172,6 +215,11 @@ void NuPlayer::DecoderPassThrough::onBufferConsumed(int32_t size) {
void NuPlayer::DecoderPassThrough::onFlush() {
++mBufferGeneration;
+ mSkipRenderingUntilMediaTimeUs = -1;
+
+ if (mRenderer != NULL) {
+ mRenderer->flush(true /* audio */);
+ }
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatFlushCompleted);
@@ -192,6 +240,7 @@ void NuPlayer::DecoderPassThrough::requestMaxBuffers() {
void NuPlayer::DecoderPassThrough::onShutdown() {
++mBufferGeneration;
+ mSkipRenderingUntilMediaTimeUs = -1;
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatShutdownCompleted);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
index fb20257..7742d30 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -25,7 +25,9 @@
namespace android {
struct NuPlayer::DecoderPassThrough : public Decoder {
- DecoderPassThrough(const sp<AMessage> &notify);
+ DecoderPassThrough(const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer);
virtual void configure(const sp<AMessage> &format);
virtual void init();
@@ -45,16 +47,15 @@ protected:
private:
enum {
kWhatRequestABuffer = 'reqB',
- kWhatConfigure = 'conf',
- kWhatInputBufferFilled = 'inpF',
kWhatBufferConsumed = 'bufC',
- kWhatFlush = 'flus',
- kWhatShutdown = 'shuD',
};
sp<AMessage> mNotify;
sp<ALooper> mDecoderLooper;
+ sp<Source> mSource;
+ sp<Renderer> mRenderer;
+
/** Returns true if a buffer was requested.
* Returns false if at EOS or cache already full.
*/
@@ -68,6 +69,8 @@ private:
void requestMaxBuffers();
void onShutdown();
+ int64_t mSkipRenderingUntilMediaTimeUs;
+
int32_t mBufferGeneration;
bool mReachedEOS;
// TODO mPendingBuffersToFill and mPendingBuffersToDrain are only for
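Both decoder paths now open the audio sink themselves through Renderer::openAudioSink() and choose the output flags from the source: a deep-buffer output only for sufficiently long audio-only content, otherwise no special flag. The sketch below mirrors that selection; the threshold value here is an assumption (the real AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US constant comes from the media headers), and the enum is a stand-in for audio_output_flags_t.

    #include <cstdint>
    #include <cstdio>

    // Assumed value; in the tree the constant comes from the media headers.
    static const int64_t AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US = 5000000ll;

    // Stand-in for the two audio_output_flags_t values used in the diff.
    enum SinkFlags { FLAG_NONE = 0, FLAG_DEEP_BUFFER = 1 };

    // Mirrors the selection in Decoder::handleAnOutputBuffer() and
    // DecoderPassThrough::onConfigure(): deep buffer only for long, audio-only content.
    static SinkFlags pickSinkFlags(bool hasVideo, bool durationKnown, int64_t durationUs) {
        if (!hasVideo && durationKnown
                && durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
            return FLAG_DEEP_BUFFER;
        }
        return FLAG_NONE;
    }

    int main() {
        std::printf("%d\n", pickSinkFlags(false, true, 180000000ll));  // long track -> 1
        std::printf("%d\n", pickSinkFlags(true,  true, 180000000ll));  // has video  -> 0
        return 0;
    }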
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index ab46074..b42b480 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -239,16 +239,24 @@ status_t NuPlayerDriver::start() {
// fall through
}
+ case STATE_PAUSED:
+ case STATE_STOPPED_AND_PREPARED:
+ {
+ if (mAtEOS && mStartupSeekTimeUs < 0) {
+ mStartupSeekTimeUs = 0;
+ mPositionUs = -1;
+ }
+
+ // fall through
+ }
+
case STATE_PREPARED:
{
mAtEOS = false;
mPlayer->start();
if (mStartupSeekTimeUs >= 0) {
- if (mStartupSeekTimeUs > 0) {
- mPlayer->seekToAsync(mStartupSeekTimeUs);
- }
-
+ mPlayer->seekToAsync(mStartupSeekTimeUs);
mStartupSeekTimeUs = -1;
}
break;
@@ -264,20 +272,6 @@ status_t NuPlayerDriver::start() {
break;
}
- case STATE_PAUSED:
- case STATE_STOPPED_AND_PREPARED:
- {
- if (mAtEOS) {
- mPlayer->seekToAsync(0);
- mAtEOS = false;
- mPlayer->resume();
- mPositionUs = -1;
- } else {
- mPlayer->resume();
- }
- break;
- }
-
default:
return INVALID_OPERATION;
}
@@ -348,6 +342,7 @@ status_t NuPlayerDriver::seekTo(int msec) {
switch (mState) {
case STATE_PREPARED:
+ case STATE_STOPPED_AND_PREPARED:
{
mStartupSeekTimeUs = seekTimeUs;
// pretend that the seek completed. It will actually happen when starting playback.
@@ -633,6 +628,11 @@ void NuPlayerDriver::notifyListener_l(
if (mLooping || (mAutoLoop
&& (mAudioSink == NULL || mAudioSink->realtime()))) {
mPlayer->seekToAsync(0);
+ if (mAudioSink != NULL) {
+ // The renderer has stopped the sink at the end in order to play out
+ // the last little bit of audio. If we're looping, we need to restart it.
+ mAudioSink->start();
+ }
break;
}
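Because the renderer now stops the AudioSink at audio EOS so the tail of the stream plays out (see the onDrainAudioQueue() hunk further below), the driver has to restart the sink when it loops back to the start, which is what the notifyListener_l() hunk above does. A tiny standalone sketch of that handshake; Sink is a stand-in for MediaPlayerBase::AudioSink.

    #include <cstdio>

    // Stand-in for MediaPlayerBase::AudioSink with just the calls involved.
    struct Sink {
        bool started = false;
        void start() { started = true;  std::printf("sink started\n"); }
        void stop()  { started = false; std::printf("sink stopped, tail plays out\n"); }
    };

    int main() {
        Sink sink;
        sink.start();

        // Renderer side: audio queue reached EOS, stop the sink so the last
        // buffers are flushed to the device.
        sink.stop();

        // Driver side: playback complete but looping, so seek back to zero and
        // restart the sink the renderer stopped.
        bool looping = true;
        if (looping) {
            std::printf("seekToAsync(0)\n");
            if (!sink.started) {
                sink.start();
            }
        }
        return 0;
    }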
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 638d9bc..73bc829 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,8 +20,6 @@
#include "NuPlayerRenderer.h"
-#include <cutils/properties.h>
-
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -38,21 +36,11 @@ namespace android {
// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
-static const int64_t kOffloadPauseMaxUs = 60000000ll;
+static const int64_t kOffloadPauseMaxUs = 10000000ll;
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
-static bool sFrameAccurateAVsync = false;
-
-static void readProperties() {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("persist.sys.media.avsync", value, NULL)) {
- sFrameAccurateAVsync =
- !strcmp("1", value) || !strcasecmp("true", value);
- }
-}
-
NuPlayer::Renderer::Renderer(
const sp<MediaPlayerBase::AudioSink> &sink,
const sp<AMessage> &notify,
@@ -68,6 +56,8 @@ NuPlayer::Renderer::Renderer(
mAudioFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
mAnchorTimeRealUs(-1),
+ mAnchorNumFramesWritten(-1),
+ mAnchorMaxMediaUs(-1),
mVideoLateByUs(0ll),
mHasAudio(false),
mHasVideo(false),
@@ -85,7 +75,6 @@ NuPlayer::Renderer::Renderer(
mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
mTotalBuffersQueued(0),
mLastAudioBufferDrained(0) {
- readProperties();
}
NuPlayer::Renderer::~Renderer() {
@@ -173,7 +162,8 @@ status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
return getCurrentPosition(mediaUs, ALooper::GetNowUs());
}
-status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
+status_t NuPlayer::Renderer::getCurrentPosition(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
Mutex::Autolock autoLock(mTimeLock);
if (!mHasAudio && !mHasVideo) {
return NO_INIT;
@@ -182,12 +172,21 @@ status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs)
if (mAnchorTimeMediaUs < 0) {
return NO_INIT;
}
+
int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
if (mPauseStartedTimeRealUs != -1) {
positionUs -= (nowUs - mPauseStartedTimeRealUs);
}
+ // limit position to the last queued media time (for video only stream
+ // position will be discrete as we don't know how long each frame lasts)
+ if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
+ if (positionUs > mAnchorMaxMediaUs) {
+ positionUs = mAnchorMaxMediaUs;
+ }
+ }
+
if (positionUs < mAudioFirstAnchorTimeMediaUs) {
positionUs = mAudioFirstAnchorTimeMediaUs;
}
@@ -217,10 +216,12 @@ void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
}
}
-void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
+void NuPlayer::Renderer::setAnchorTime(
+ int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
Mutex::Autolock autoLock(mTimeLock);
mAnchorTimeMediaUs = mediaUs;
mAnchorTimeRealUs = realUs;
+ mAnchorNumFramesWritten = numFramesWritten;
if (resume) {
mPauseStartedTimeRealUs = -1;
}
@@ -541,7 +542,7 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
- onNewAudioMediaTime(mediaTimeUs);
+ setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -564,6 +565,14 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
notifyIfMediaRenderingStarted();
}
+ if (mAudioFirstAnchorTimeMediaUs >= 0) {
+ int64_t nowUs = ALooper::GetNowUs();
+ setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
+ }
+
+ // we don't know how much data we are queueing for offloaded tracks
+ mAnchorMaxMediaUs = -1;
+
if (hasEOS) {
(new AMessage(kWhatStopAudioSink, id()))->post();
}
@@ -607,6 +616,10 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
+ // Need to stop the track here, because that will play out the last
+ // little bit at the end of the file. Otherwise short files won't play.
+ mAudioSink->stop();
+ mNumFramesWritten = 0;
return false;
}
@@ -663,6 +676,11 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
break;
}
}
+ mAnchorMaxMediaUs =
+ mAnchorTimeMediaUs +
+ (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+ * 1000LL * mAudioSink->msecsPerFrame());
+
return !mAudioQueue.empty();
}
@@ -674,7 +692,7 @@ int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
int64_t currentPositionUs;
- if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
+ if (getCurrentPosition(&currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
// If failed to get current position, e.g. due to audio clock is not ready, then just
// play out video immediately without delay.
return nowUs;
@@ -690,7 +708,8 @@ void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
}
setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
int64_t nowUs = ALooper::GetNowUs();
- setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
+ setAnchorTime(
+ mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
}
void NuPlayer::Renderer::postDrainVideoQueue() {
@@ -733,6 +752,9 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
} else {
realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
}
+ if (!mHasAudio) {
+ mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
+ }
// Heuristics to handle situation when media time changed without a
// discontinuity. If we have not drained an audio buffer that was
@@ -761,11 +783,6 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
// post 2 display refreshes before rendering is due
- // FIXME currently this increases power consumption, so unless frame-accurate
- // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
- if (!sFrameAccurateAVsync) {
- twoVsyncsUs >>= 4;
- }
msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
mDrainVideoQueuePending = true;
@@ -1103,6 +1120,7 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
+ mAnchorNumFramesWritten = -1;
uint32_t written;
if (mAudioSink->getFramesWritten(&written) == OK) {
mNumFramesWritten = written;
@@ -1142,8 +1160,6 @@ void NuPlayer::Renderer::onPause() {
}
void NuPlayer::Renderer::onResume() {
- readProperties();
-
if (!mPaused) {
return;
}
@@ -1158,7 +1174,8 @@ void NuPlayer::Renderer::onResume() {
if (mPauseStartedTimeRealUs != -1) {
int64_t newAnchorRealUs =
mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
- setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
+ setAnchorTime(
+ mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
}
if (!mAudioQueue.empty()) {
@@ -1341,8 +1358,9 @@ bool NuPlayer::Renderer::onOpenAudioSink(
return offloadingAudio();
}
ALOGV("openAudioSink: try to open AudioSink in offload mode");
- flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
- flags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ uint32_t offloadFlags = flags;
+ offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
audioSinkChanged = true;
mAudioSink->close();
err = mAudioSink->open(
@@ -1353,7 +1371,7 @@ bool NuPlayer::Renderer::onOpenAudioSink(
8 /* bufferCount */,
&NuPlayer::Renderer::AudioSinkCallback,
this,
- (audio_output_flags_t)flags,
+ (audio_output_flags_t)offloadFlags,
&offloadInfo);
if (err == OK) {
@@ -1377,9 +1395,9 @@ bool NuPlayer::Renderer::onOpenAudioSink(
}
}
if (!offloadOnly && !offloadingAudio()) {
- flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
ALOGV("openAudioSink: open AudioSink in NON-offload mode");
-
+ uint32_t pcmFlags = flags;
+ pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
audioSinkChanged = true;
mAudioSink->close();
mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
@@ -1391,7 +1409,7 @@ bool NuPlayer::Renderer::onOpenAudioSink(
8 /* bufferCount */,
NULL,
NULL,
- (audio_output_flags_t)flags),
+ (audio_output_flags_t)pcmFlags),
(status_t)OK);
mAudioSink->start();
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index b15a266..7b46a59 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -61,11 +61,13 @@ struct NuPlayer::Renderer : public AHandler {
// Following setters and getters are protected by mTimeLock.
status_t getCurrentPosition(int64_t *mediaUs);
- status_t getCurrentPosition(int64_t *mediaUs, int64_t nowUs);
+ status_t getCurrentPosition(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
void setHasMedia(bool audio);
void setAudioFirstAnchorTime(int64_t mediaUs);
void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
- void setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume = false);
+ void setAnchorTime(
+ int64_t mediaUs, int64_t realUs, int64_t numFramesWritten = -1, bool resume = false);
void setVideoLateByUs(int64_t lateUs);
int64_t getVideoLateByUs();
void setPauseStartedTimeRealUs(int64_t realUs);
@@ -148,6 +150,8 @@ private:
int64_t mAudioFirstAnchorTimeMediaUs;
int64_t mAnchorTimeMediaUs;
int64_t mAnchorTimeRealUs;
+ int64_t mAnchorNumFramesWritten;
+ int64_t mAnchorMaxMediaUs;
int64_t mVideoLateByUs;
bool mHasAudio;
bool mHasVideo;
@@ -174,6 +178,7 @@ private:
int32_t mTotalBuffersQueued;
int32_t mLastAudioBufferDrained;
+
size_t fillAudioBuffer(void *buffer, size_t size);
bool onDrainAudioQueue();
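The renderer changes above add mAnchorNumFramesWritten and mAnchorMaxMediaUs so that getCurrentPosition() can extrapolate from the last anchor but is clamped to the media time actually queued to the sink, which keeps the reported position from running ahead for video-only and offloaded streams. Here is a standalone sketch of the arithmetic; the sample rate and helper names are illustrative, and the real getCurrentPosition() additionally handles paused time and the first-anchor clamp.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Standalone model of the anchor bookkeeping in NuPlayerRenderer.
    struct AnchorClock {
        int64_t anchorMediaUs = -1;        // media time at the anchor
        int64_t anchorRealUs = -1;         // wall-clock time at the anchor
        int64_t anchorFramesWritten = -1;  // sink frames written at the anchor
        int64_t anchorMaxMediaUs = -1;     // newest media time known to be queued

        void setAnchor(int64_t mediaUs, int64_t realUs, int64_t framesWritten) {
            anchorMediaUs = mediaUs;
            anchorRealUs = realUs;
            anchorFramesWritten = framesWritten;
        }

        // After draining audio: extend the clamp by the audio queued since the anchor.
        void updateMax(int64_t framesWrittenNow, double msecsPerFrame) {
            int64_t framesSinceAnchor =
                    std::max<int64_t>(framesWrittenNow - anchorFramesWritten, 0);
            anchorMaxMediaUs =
                    anchorMediaUs + (int64_t)(framesSinceAnchor * 1000.0 * msecsPerFrame);
        }

        int64_t positionAt(int64_t nowUs) const {
            int64_t positionUs = (nowUs - anchorRealUs) + anchorMediaUs;
            if (anchorMaxMediaUs >= 0 && positionUs > anchorMaxMediaUs) {
                positionUs = anchorMaxMediaUs;  // never report past what was queued
            }
            return positionUs;
        }
    };

    int main() {
        AnchorClock clock;
        clock.setAnchor(/* mediaUs */ 0, /* realUs */ 1000000, /* framesWritten */ 0);
        // 4410 frames at 44.1 kHz ~= 100 ms of queued audio.
        clock.updateMax(4410, 1000.0 / 44100.0);
        std::printf("%lld\n", (long long)clock.positionAt(1050000));  // 50000, inside the clamp
        std::printf("%lld\n", (long long)clock.positionAt(1500000));  // clamped to 100000
        return 0;
    }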
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index ffacb8f..52ae9ee 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -506,7 +506,7 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
sp<AnotherPacketSource> source = info->mSource;
if (source != NULL) {
source->queueDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK,
+ ATSParser::DISCONTINUITY_TIME,
NULL,
true /* discard */);
}
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index 2e9a29a..27f5159 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -80,7 +80,7 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
mFinalResult = ERROR_END_OF_STREAM;
break;
} else if (n == INFO_DISCONTINUITY) {
- int32_t type = ATSParser::DISCONTINUITY_SEEK;
+ int32_t type = ATSParser::DISCONTINUITY_TIME;
int32_t mask;
if (extra != NULL
@@ -118,7 +118,7 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
mTSParser->signalDiscontinuity(
((type & 1) == 0)
- ? ATSParser::DISCONTINUITY_SEEK
+ ? ATSParser::DISCONTINUITY_TIME
: ATSParser::DISCONTINUITY_FORMATCHANGE,
extra);
} else {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index eff5d77..26284e9 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -498,6 +498,10 @@ void ACodec::initiateShutdown(bool keepComponentAllocated) {
sp<AMessage> msg = new AMessage(kWhatShutdown, id());
msg->setInt32("keepComponentAllocated", keepComponentAllocated);
msg->post();
+ if (!keepComponentAllocated) {
+ // ensure shutdown completes in 3 seconds
+ (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000);
+ }
}
void ACodec::signalRequestIDRFrame() {
@@ -2217,7 +2221,11 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
- video_def->eColorFormat = colorFormat;
+ // this is redundant as it was already set up in setVideoPortFormatType
+ // FIXME for now skip this only for flexible YUV formats
+ if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
+ video_def->eColorFormat = colorFormat;
+ }
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
@@ -3797,6 +3805,19 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ ALOGI("[%s] forcing the release of codec",
+ mCodec->mComponentName.c_str());
+ status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
+ ALOGE_IF(err != OK, "[%s] failed to release codec instance: err=%d",
+ mCodec->mComponentName.c_str(), err);
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+ break;
+ }
+
default:
return false;
}
@@ -4456,6 +4477,13 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ // nothing to do, as we have already signaled shutdown
+ handled = true;
+ break;
+ }
+
default:
return BaseState::onMessageReceived(msg);
}
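initiateShutdown() now arms a kWhatReleaseCodecInstance message three seconds out so that a component which never finishes its shutdown sequence still gets freed, while UninitializedState swallows the message once shutdown has already completed. Below is a minimal standalone model of that delayed-watchdog pattern; a worker thread stands in for the delayed AMessage post on the ACodec looper.

    #include <atomic>
    #include <chrono>
    #include <cstdio>
    #include <thread>

    // Standalone model: the real code posts kWhatReleaseCodecInstance with a 3s
    // delay on the ACodec looper; a worker thread plays that role here.
    int main() {
        std::atomic<bool> shutdownCompleted{false};

        std::thread watchdog([&shutdownCompleted] {
            std::this_thread::sleep_for(std::chrono::milliseconds(300));  // 3s in ACodec
            if (!shutdownCompleted.load()) {
                std::printf("forcing the release of the codec\n");
            } else {
                std::printf("shutdown already completed, nothing to do\n");
            }
        });

        // Simulate the normal shutdown path winning the race.
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        shutdownCompleted.store(true);

        watchdog.join();
        return 0;
    }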
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index ab8ac79..6a56729 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -75,7 +75,7 @@ static const size_t kHighWaterMarkBytes = 200000;
// maximum time in paused state when offloading audio decompression. When elapsed, the AudioPlayer
// is destroyed to allow the audio DSP to power down.
-static int64_t kOffloadPauseMaxUs = 60000000ll;
+static int64_t kOffloadPauseMaxUs = 10000000ll;
struct AwesomeEvent : public TimedEventQueue::Event {
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 853e8fc..ad12bdd 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -130,6 +130,7 @@ static int32_t getColorFormat(const char* colorFormat) {
"CameraSource::getColorFormat", colorFormat);
CHECK(!"Unknown color format");
+ return -1;
}
CameraSource *CameraSource::Create(const String16 &clientName) {
@@ -677,63 +678,80 @@ void CameraSource::stopCameraRecording() {
void CameraSource::releaseCamera() {
ALOGV("releaseCamera");
- if (mCamera != 0) {
+ sp<Camera> camera;
+ bool coldCamera = false;
+ {
+ Mutex::Autolock autoLock(mLock);
+ // get a local ref and clear ref to mCamera now
+ camera = mCamera;
+ mCamera.clear();
+ coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
+ }
+
+ if (camera != 0) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ if (coldCamera) {
ALOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
- mCamera->disconnect();
+ camera->stopPreview();
+ camera->disconnect();
}
- mCamera->unlock();
- mCamera.clear();
- mCamera = 0;
+ camera->unlock();
IPCThreadState::self()->restoreCallingIdentity(token);
}
- if (mCameraRecordingProxy != 0) {
- IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
- mCameraRecordingProxy.clear();
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mCameraRecordingProxy != 0) {
+ IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
+ mCameraRecordingProxy.clear();
+ }
+ mCameraFlags = 0;
}
- mCameraFlags = 0;
}
status_t CameraSource::reset() {
ALOGD("reset: E");
- Mutex::Autolock autoLock(mLock);
- mStarted = false;
- mFrameAvailableCondition.signal();
- int64_t token;
- bool isTokenValid = false;
- if (mCamera != 0) {
- token = IPCThreadState::self()->clearCallingIdentity();
- isTokenValid = true;
- }
- releaseQueuedFrames();
- while (!mFramesBeingEncoded.empty()) {
- if (NO_ERROR !=
- mFrameCompleteCondition.waitRelative(mLock,
- mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
- ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
- mFramesBeingEncoded.size());
+ {
+ Mutex::Autolock autoLock(mLock);
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
+ int64_t token;
+ bool isTokenValid = false;
+ if (mCamera != 0) {
+ token = IPCThreadState::self()->clearCallingIdentity();
+ isTokenValid = true;
+ }
+ releaseQueuedFrames();
+ while (!mFramesBeingEncoded.empty()) {
+ if (NO_ERROR !=
+ mFrameCompleteCondition.waitRelative(mLock,
+ mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+ ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
+ mFramesBeingEncoded.size());
+ }
+ }
+ stopCameraRecording();
+ if (isTokenValid) {
+ IPCThreadState::self()->restoreCallingIdentity(token);
}
- }
- stopCameraRecording();
- releaseCamera();
- if (isTokenValid) {
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
- if (mCollectStats) {
- ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
- mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
- mLastFrameTimestampUs - mFirstFrameTimeUs);
- }
+ if (mCollectStats) {
+ ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
+ mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
+ mLastFrameTimestampUs - mFirstFrameTimeUs);
+ }
- if (mNumGlitches > 0) {
- ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ if (mNumGlitches > 0) {
+ ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ }
+
+ CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
}
- CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+ releaseCamera();
+
ALOGD("reset: X");
return OK;
}
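releaseCamera() now takes a local strong reference and clears mCamera (and the hot/cold flag) while holding mLock, then issues the binder calls outside the lock, and reset() keeps the lock only around its own bookkeeping before calling releaseCamera() last. A simplified standalone sketch of that pattern, with std::shared_ptr and std::mutex standing in for sp<> and Mutex, and a fake disconnect() in place of the camera IPC.

    #include <cstdio>
    #include <memory>
    #include <mutex>

    struct Camera {
        void disconnect() { std::printf("stopPreview/disconnect/unlock outside the lock\n"); }
    };

    struct CameraHolder {
        std::mutex mLock;
        std::shared_ptr<Camera> mCamera = std::make_shared<Camera>();

        void releaseCamera() {
            std::shared_ptr<Camera> camera;
            {
                // Take a local reference and clear the member under the lock...
                std::lock_guard<std::mutex> guard(mLock);
                camera = mCamera;
                mCamera.reset();
            }
            // ...then make the potentially blocking calls without holding it.
            if (camera) {
                camera->disconnect();
            }
        }
    };

    int main() {
        CameraHolder holder;
        holder.releaseCamera();
        return 0;
    }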
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index f6525ae..9f20b1d 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -41,6 +41,11 @@
#include "include/ESDS.h"
+
+#ifndef __predict_false
+#define __predict_false(exp) __builtin_expect((exp) != 0, 0)
+#endif
+
#define WARN_UNLESS(condition, message, ...) \
( (__predict_false(condition)) ? false : ({ \
ALOGW("Condition %s failed " message, #condition, ##__VA_ARGS__); \
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5f55484..11069e4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -738,6 +738,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
err, actionCode, mState);
if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
+ mFlags &= ~kFlagIsComponentAllocated;
}
bool sendErrorResponse = true;
@@ -863,6 +864,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
{
CHECK_EQ(mState, INITIALIZING);
setState(INITIALIZED);
+ mFlags |= kFlagIsComponentAllocated;
CHECK(msg->findString("componentName", &mComponentName));
@@ -1009,6 +1011,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
mFlags |= kFlagOutputFormatChanged;
postActivityNotificationIfPossible();
}
+
+ // Notify mCrypto of video resolution changes
+ if (mCrypto != NULL) {
+ int32_t height, width;
+ if (mOutputFormat->findInt32("height", &height) &&
+ mOutputFormat->findInt32("width", &width)) {
+ mCrypto->notifyResolution(width, height);
+ }
+ }
+
break;
}
@@ -1136,6 +1148,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
setState(UNINITIALIZED);
mComponentName.clear();
}
+ mFlags &= ~kFlagIsComponentAllocated;
(new AMessage)->postReply(mReplyID);
break;
@@ -1336,9 +1349,13 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != INITIALIZED
+ if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
+ && mState != INITIALIZED
&& mState != CONFIGURED && !isExecuting()) {
- // We may be in "UNINITIALIZED" state already without the
+ // 1) Permit release to shut down the component if allocated.
+ //
+ // 2) We may be in "UNINITIALIZED" state already and
+ // also shutdown the encoder/decoder without the
// client being aware of this if media server died while
// we were being stopped. The client would assume that
// after stop() returned, it would be safe to call release()
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0fecda8..c26e909 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -422,19 +422,11 @@ status_t MediaCodecSource::initEncoder() {
}
}
- err = mEncoder->start();
-
- if (err != OK) {
- return err;
- }
-
- err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
+ mEncoderActivityNotify = new AMessage(
+ kWhatEncoderActivity, mReflector->id());
+ mEncoder->setCallback(mEncoderActivityNotify);
- if (err != OK) {
- return err;
- }
-
- err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+ err = mEncoder->start();
if (err != OK) {
return err;
@@ -461,14 +453,6 @@ void MediaCodecSource::releaseEncoder() {
mbuf->release();
}
}
-
- for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
- sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
- accessUnit->setMediaBufferBase(NULL);
- }
-
- mEncoderInputBuffers.clear();
- mEncoderOutputBuffers.clear();
}
status_t MediaCodecSource::postSynchronouslyAndReturnError(
@@ -539,20 +523,6 @@ void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
}
}
-void MediaCodecSource::scheduleDoMoreWork() {
- if (mDoMoreWorkPending) {
- return;
- }
-
- mDoMoreWorkPending = true;
-
- if (mEncoderActivityNotify == NULL) {
- mEncoderActivityNotify = new AMessage(
- kWhatEncoderActivity, mReflector->id());
- }
- mEncoder->requestActivityNotification(mEncoderActivityNotify);
-}
-
status_t MediaCodecSource::feedEncoderInputBuffers() {
while (!mInputBufferQueue.empty()
&& !mAvailEncoderInputIndices.empty()) {
@@ -587,16 +557,22 @@ status_t MediaCodecSource::feedEncoderInputBuffers() {
#endif // DEBUG_DRIFT_TIME
}
+ sp<ABuffer> inbuf;
+ status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
+ if (err != OK || inbuf == NULL) {
+ mbuf->release();
+ signalEOS();
+ break;
+ }
+
size = mbuf->size();
- memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
- mbuf->data(), size);
+ memcpy(inbuf->data(), mbuf->data(), size);
if (mIsVideo) {
// video encoder will release MediaBuffer when done
// with underlying data.
- mEncoderInputBuffers.itemAt(bufferIndex)->setMediaBufferBase(
- mbuf);
+ inbuf->setMediaBufferBase(mbuf);
} else {
mbuf->release();
}
@@ -615,113 +591,6 @@ status_t MediaCodecSource::feedEncoderInputBuffers() {
return OK;
}
-status_t MediaCodecSource::doMoreWork(int32_t numInput, int32_t numOutput) {
- status_t err = OK;
-
- if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
- while (numInput-- > 0) {
- size_t bufferIndex;
- err = mEncoder->dequeueInputBuffer(&bufferIndex);
-
- if (err != OK) {
- break;
- }
-
- mAvailEncoderInputIndices.push_back(bufferIndex);
- }
-
- feedEncoderInputBuffers();
- }
-
- while (numOutput-- > 0) {
- size_t bufferIndex;
- size_t offset;
- size_t size;
- int64_t timeUs;
- uint32_t flags;
- native_handle_t* handle = NULL;
- err = mEncoder->dequeueOutputBuffer(
- &bufferIndex, &offset, &size, &timeUs, &flags);
-
- if (err != OK) {
- if (err == INFO_FORMAT_CHANGED) {
- continue;
- } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
- mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
- continue;
- }
-
- if (err == -EAGAIN) {
- err = OK;
- }
- break;
- }
- if (!(flags & MediaCodec::BUFFER_FLAG_EOS)) {
- sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
-
- MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
- memcpy(mbuf->data(), outbuf->data(), outbuf->size());
-
- if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
- if (mIsVideo) {
- int64_t decodingTimeUs;
- if (mFlags & FLAG_USE_SURFACE_INPUT) {
- // GraphicBufferSource is supposed to discard samples
- // queued before start, and offset timeUs by start time
- CHECK_GE(timeUs, 0ll);
- // TODO:
- // Decoding time for surface source is unavailable,
- // use presentation time for now. May need to move
- // this logic into MediaCodec.
- decodingTimeUs = timeUs;
- } else {
- CHECK(!mDecodingTimeQueue.empty());
- decodingTimeUs = *(mDecodingTimeQueue.begin());
- mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
- }
- mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
-
- ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
- timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
- } else {
- int64_t driftTimeUs = 0;
-#if DEBUG_DRIFT_TIME
- CHECK(!mDriftTimeQueue.empty());
- driftTimeUs = *(mDriftTimeQueue.begin());
- mDriftTimeQueue.erase(mDriftTimeQueue.begin());
- mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
-#endif // DEBUG_DRIFT_TIME
- ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
- timeUs, timeUs / 1E6, driftTimeUs);
- }
- mbuf->meta_data()->setInt64(kKeyTime, timeUs);
- } else {
- mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
- }
- if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
- mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
- }
- mbuf->setObserver(this);
- mbuf->add_ref();
-
- {
- Mutex::Autolock autoLock(mOutputBufferLock);
- mOutputBufferQueue.push_back(mbuf);
- mOutputBufferCond.signal();
- }
- }
-
- mEncoder->releaseOutputBuffer(bufferIndex);
-
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- err = ERROR_END_OF_STREAM;
- break;
- }
- }
-
- return err;
-}
-
status_t MediaCodecSource::onStart(MetaData *params) {
if (mStopping) {
ALOGE("Failed to start while we're stopping");
@@ -749,7 +618,6 @@ status_t MediaCodecSource::onStart(MetaData *params) {
startTimeUs = -1ll;
}
resume(startTimeUs);
- scheduleDoMoreWork();
} else {
CHECK(mPuller != NULL);
sp<AMessage> notify = new AMessage(
@@ -793,37 +661,110 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
mInputBufferQueue.push_back(mbuf);
feedEncoderInputBuffers();
- scheduleDoMoreWork();
break;
}
case kWhatEncoderActivity:
{
- mDoMoreWorkPending = false;
-
if (mEncoder == NULL) {
break;
}
- int32_t numInput, numOutput;
+ int32_t cbID;
+ CHECK(msg->findInt32("callbackID", &cbID));
+ if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
+ int32_t index;
+ CHECK(msg->findInt32("index", &index));
+
+ mAvailEncoderInputIndices.push_back(index);
+ feedEncoderInputBuffers();
+ } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
+ int32_t index;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ int32_t flags;
+ native_handle_t* handle = NULL;
+
+ CHECK(msg->findInt32("index", &index));
+ CHECK(msg->findSize("offset", &offset));
+ CHECK(msg->findSize("size", &size));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+ CHECK(msg->findInt32("flags", &flags));
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ mEncoder->releaseOutputBuffer(index);
+ signalEOS();
+ break;
+ }
- if (!msg->findInt32("input-buffers", &numInput)) {
- numInput = INT32_MAX;
- }
- if (!msg->findInt32("output-buffers", &numOutput)) {
- numOutput = INT32_MAX;
- }
+ sp<ABuffer> outbuf;
+ status_t err = mEncoder->getOutputBuffer(index, &outbuf);
+ if (err != OK || outbuf == NULL) {
+ signalEOS();
+ break;
+ }
- status_t err = doMoreWork(numInput, numOutput);
+ MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
+ memcpy(mbuf->data(), outbuf->data(), outbuf->size());
- if (err == OK) {
- scheduleDoMoreWork();
- } else {
- // reached EOS, or error
- signalEOS(err);
- }
+ if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
+ if (mIsVideo) {
+ int64_t decodingTimeUs;
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ // GraphicBufferSource is supposed to discard samples
+ // queued before start, and offset timeUs by start time
+ CHECK_GE(timeUs, 0ll);
+ // TODO:
+ // Decoding time for surface source is unavailable,
+ // use presentation time for now. May need to move
+ // this logic into MediaCodec.
+ decodingTimeUs = timeUs;
+ } else {
+ CHECK(!mDecodingTimeQueue.empty());
+ decodingTimeUs = *(mDecodingTimeQueue.begin());
+ mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
+ }
+ mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
- break;
+ ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
+ timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
+ } else {
+ int64_t driftTimeUs = 0;
+#if DEBUG_DRIFT_TIME
+ CHECK(!mDriftTimeQueue.empty());
+ driftTimeUs = *(mDriftTimeQueue.begin());
+ mDriftTimeQueue.erase(mDriftTimeQueue.begin());
+ mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+ ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
+ timeUs, timeUs / 1E6, driftTimeUs);
+ }
+ mbuf->meta_data()->setInt64(kKeyTime, timeUs);
+ } else {
+ mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
+ }
+ if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
+ mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
+ }
+ mbuf->setObserver(this);
+ mbuf->add_ref();
+
+ {
+ Mutex::Autolock autoLock(mOutputBufferLock);
+ mOutputBufferQueue.push_back(mbuf);
+ mOutputBufferCond.signal();
+ }
+
+ mEncoder->releaseOutputBuffer(index);
+ } else if (cbID == MediaCodec::CB_ERROR) {
+ status_t err;
+ CHECK(msg->findInt32("err", &err));
+ ALOGE("Encoder (%s) reported error : 0x%x",
+ mIsVideo ? "video" : "audio", err);
+ signalEOS();
+ }
+ break;
}
case kWhatStart:
{
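MediaCodecSource drops the doMoreWork()/scheduleDoMoreWork() polling loop and instead registers a callback before mEncoder->start(): input buffer indices arrive as CB_INPUT_AVAILABLE, finished output as CB_OUTPUT_AVAILABLE (fetched via getInputBuffer()/getOutputBuffer()), and CB_ERROR ends the stream. A condensed standalone sketch of that dispatch follows; the callback IDs match the diff, while the message struct and printf bodies are stand-ins for the real AMessage handling.

    #include <cstdint>
    #include <cstdio>

    // Callback IDs as used in the diff; the struct stands in for the fields the
    // handler reads out of the kWhatEncoderActivity message.
    enum CallbackID { CB_INPUT_AVAILABLE, CB_OUTPUT_AVAILABLE, CB_ERROR };

    struct EncoderActivity {
        CallbackID id;
        int32_t index;   // buffer index for input/output callbacks
        int64_t timeUs;  // presentation time for output callbacks
        bool eos;        // BUFFER_FLAG_EOS set on the output buffer
    };

    // Sketch of the handler: queue input indices, copy out finished output
    // buffers, stop on EOS or error.
    static void onEncoderActivity(const EncoderActivity &msg) {
        switch (msg.id) {
            case CB_INPUT_AVAILABLE:
                std::printf("input buffer %d available, feed the next frame\n", msg.index);
                break;
            case CB_OUTPUT_AVAILABLE:
                if (msg.eos) {
                    std::printf("EOS on buffer %d, release it and signal end of stream\n",
                                msg.index);
                } else {
                    std::printf("output buffer %d ready at %lld us, hand off then release\n",
                                msg.index, (long long)msg.timeUs);
                }
                break;
            case CB_ERROR:
                std::printf("encoder reported an error, signal EOS to the reader\n");
                break;
        }
    }

    int main() {
        onEncoderActivity({CB_INPUT_AVAILABLE,  0, 0,     false});
        onEncoderActivity({CB_OUTPUT_AVAILABLE, 0, 33000, false});
        onEncoderActivity({CB_OUTPUT_AVAILABLE, 1, 0,     true});
        return 0;
    }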
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index a8806c8..f26563e 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -47,10 +47,11 @@
#include <media/stagefright/SkipCutBuffer.h>
#include <utils/Vector.h>
-#include <OMX_Audio.h>
#include <OMX_AudioExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
+#include <OMX_VideoExt.h>
+#include <OMX_AsString.h>
#include "include/avc_utils.h"
@@ -817,6 +818,7 @@ static size_t getFrameSize(
CHECK(!"Should not be here. Unsupported color format.");
break;
}
+ return 0;
}
status_t OMXCodec::findTargetColorFormat(
@@ -4077,220 +4079,6 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
CHECK(!"should not be here.");
}
-static const char *imageCompressionFormatString(OMX_IMAGE_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_IMAGE_CodingUnused",
- "OMX_IMAGE_CodingAutoDetect",
- "OMX_IMAGE_CodingJPEG",
- "OMX_IMAGE_CodingJPEG2K",
- "OMX_IMAGE_CodingEXIF",
- "OMX_IMAGE_CodingTIFF",
- "OMX_IMAGE_CodingGIF",
- "OMX_IMAGE_CodingPNG",
- "OMX_IMAGE_CodingLZW",
- "OMX_IMAGE_CodingBMP",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *colorFormatString(OMX_COLOR_FORMATTYPE type) {
- static const char *kNames[] = {
- "OMX_COLOR_FormatUnused",
- "OMX_COLOR_FormatMonochrome",
- "OMX_COLOR_Format8bitRGB332",
- "OMX_COLOR_Format12bitRGB444",
- "OMX_COLOR_Format16bitARGB4444",
- "OMX_COLOR_Format16bitARGB1555",
- "OMX_COLOR_Format16bitRGB565",
- "OMX_COLOR_Format16bitBGR565",
- "OMX_COLOR_Format18bitRGB666",
- "OMX_COLOR_Format18bitARGB1665",
- "OMX_COLOR_Format19bitARGB1666",
- "OMX_COLOR_Format24bitRGB888",
- "OMX_COLOR_Format24bitBGR888",
- "OMX_COLOR_Format24bitARGB1887",
- "OMX_COLOR_Format25bitARGB1888",
- "OMX_COLOR_Format32bitBGRA8888",
- "OMX_COLOR_Format32bitARGB8888",
- "OMX_COLOR_FormatYUV411Planar",
- "OMX_COLOR_FormatYUV411PackedPlanar",
- "OMX_COLOR_FormatYUV420Planar",
- "OMX_COLOR_FormatYUV420PackedPlanar",
- "OMX_COLOR_FormatYUV420SemiPlanar",
- "OMX_COLOR_FormatYUV422Planar",
- "OMX_COLOR_FormatYUV422PackedPlanar",
- "OMX_COLOR_FormatYUV422SemiPlanar",
- "OMX_COLOR_FormatYCbYCr",
- "OMX_COLOR_FormatYCrYCb",
- "OMX_COLOR_FormatCbYCrY",
- "OMX_COLOR_FormatCrYCbY",
- "OMX_COLOR_FormatYUV444Interleaved",
- "OMX_COLOR_FormatRawBayer8bit",
- "OMX_COLOR_FormatRawBayer10bit",
- "OMX_COLOR_FormatRawBayer8bitcompressed",
- "OMX_COLOR_FormatL2",
- "OMX_COLOR_FormatL4",
- "OMX_COLOR_FormatL8",
- "OMX_COLOR_FormatL16",
- "OMX_COLOR_FormatL24",
- "OMX_COLOR_FormatL32",
- "OMX_COLOR_FormatYUV420PackedSemiPlanar",
- "OMX_COLOR_FormatYUV422PackedSemiPlanar",
- "OMX_COLOR_Format18BitBGR666",
- "OMX_COLOR_Format24BitARGB6666",
- "OMX_COLOR_Format24BitABGR6666",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
- return "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar";
- } else if (type == OMX_QCOM_COLOR_FormatYVU420SemiPlanar) {
- return "OMX_QCOM_COLOR_FormatYVU420SemiPlanar";
- } else if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *videoCompressionFormatString(OMX_VIDEO_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_VIDEO_CodingUnused",
- "OMX_VIDEO_CodingAutoDetect",
- "OMX_VIDEO_CodingMPEG2",
- "OMX_VIDEO_CodingH263",
- "OMX_VIDEO_CodingMPEG4",
- "OMX_VIDEO_CodingWMV",
- "OMX_VIDEO_CodingRV",
- "OMX_VIDEO_CodingAVC",
- "OMX_VIDEO_CodingMJPEG",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *audioCodingTypeString(OMX_AUDIO_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_CodingUnused",
- "OMX_AUDIO_CodingAutoDetect",
- "OMX_AUDIO_CodingPCM",
- "OMX_AUDIO_CodingADPCM",
- "OMX_AUDIO_CodingAMR",
- "OMX_AUDIO_CodingGSMFR",
- "OMX_AUDIO_CodingGSMEFR",
- "OMX_AUDIO_CodingGSMHR",
- "OMX_AUDIO_CodingPDCFR",
- "OMX_AUDIO_CodingPDCEFR",
- "OMX_AUDIO_CodingPDCHR",
- "OMX_AUDIO_CodingTDMAFR",
- "OMX_AUDIO_CodingTDMAEFR",
- "OMX_AUDIO_CodingQCELP8",
- "OMX_AUDIO_CodingQCELP13",
- "OMX_AUDIO_CodingEVRC",
- "OMX_AUDIO_CodingSMV",
- "OMX_AUDIO_CodingG711",
- "OMX_AUDIO_CodingG723",
- "OMX_AUDIO_CodingG726",
- "OMX_AUDIO_CodingG729",
- "OMX_AUDIO_CodingAAC",
- "OMX_AUDIO_CodingMP3",
- "OMX_AUDIO_CodingSBC",
- "OMX_AUDIO_CodingVORBIS",
- "OMX_AUDIO_CodingOPUS",
- "OMX_AUDIO_CodingWMA",
- "OMX_AUDIO_CodingRA",
- "OMX_AUDIO_CodingMIDI",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *audioPCMModeString(OMX_AUDIO_PCMMODETYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_PCMModeLinear",
- "OMX_AUDIO_PCMModeALaw",
- "OMX_AUDIO_PCMModeMULaw",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *amrBandModeString(OMX_AUDIO_AMRBANDMODETYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_AMRBandModeUnused",
- "OMX_AUDIO_AMRBandModeNB0",
- "OMX_AUDIO_AMRBandModeNB1",
- "OMX_AUDIO_AMRBandModeNB2",
- "OMX_AUDIO_AMRBandModeNB3",
- "OMX_AUDIO_AMRBandModeNB4",
- "OMX_AUDIO_AMRBandModeNB5",
- "OMX_AUDIO_AMRBandModeNB6",
- "OMX_AUDIO_AMRBandModeNB7",
- "OMX_AUDIO_AMRBandModeWB0",
- "OMX_AUDIO_AMRBandModeWB1",
- "OMX_AUDIO_AMRBandModeWB2",
- "OMX_AUDIO_AMRBandModeWB3",
- "OMX_AUDIO_AMRBandModeWB4",
- "OMX_AUDIO_AMRBandModeWB5",
- "OMX_AUDIO_AMRBandModeWB6",
- "OMX_AUDIO_AMRBandModeWB7",
- "OMX_AUDIO_AMRBandModeWB8",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *amrFrameFormatString(OMX_AUDIO_AMRFRAMEFORMATTYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_AMRFrameFormatConformance",
- "OMX_AUDIO_AMRFrameFormatIF1",
- "OMX_AUDIO_AMRFrameFormatIF2",
- "OMX_AUDIO_AMRFrameFormatFSF",
- "OMX_AUDIO_AMRFrameFormatRTPPayload",
- "OMX_AUDIO_AMRFrameFormatITU",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -4321,10 +4109,10 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf(" nStride = %" PRIu32 "\n", imageDef->nStride);
printf(" eCompressionFormat = %s\n",
- imageCompressionFormatString(imageDef->eCompressionFormat));
+ asString(imageDef->eCompressionFormat));
printf(" eColorFormat = %s\n",
- colorFormatString(imageDef->eColorFormat));
+ asString(imageDef->eColorFormat));
break;
}
@@ -4340,10 +4128,10 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf(" nStride = %" PRIu32 "\n", videoDef->nStride);
printf(" eCompressionFormat = %s\n",
- videoCompressionFormatString(videoDef->eCompressionFormat));
+ asString(videoDef->eCompressionFormat));
printf(" eColorFormat = %s\n",
- colorFormatString(videoDef->eColorFormat));
+ asString(videoDef->eColorFormat));
break;
}
@@ -4355,7 +4143,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("\n");
printf(" // Audio\n");
printf(" eEncoding = %s\n",
- audioCodingTypeString(audioDef->eEncoding));
+ asString(audioDef->eEncoding));
if (audioDef->eEncoding == OMX_AUDIO_CodingPCM) {
OMX_AUDIO_PARAM_PCMMODETYPE params;
@@ -4375,7 +4163,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
params.eNumData == OMX_NumericalDataSigned
? "signed" : "unsigned");
- printf(" ePCMMode = %s\n", audioPCMModeString(params.ePCMMode));
+ printf(" ePCMMode = %s\n", asString(params.ePCMMode));
} else if (audioDef->eEncoding == OMX_AUDIO_CodingAMR) {
OMX_AUDIO_PARAM_AMRTYPE amr;
InitOMXParams(&amr);
@@ -4387,9 +4175,9 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf(" nChannels = %" PRIu32 "\n", amr.nChannels);
printf(" eAMRBandMode = %s\n",
- amrBandModeString(amr.eAMRBandMode));
+ asString(amr.eAMRBandMode));
printf(" eAMRFrameFormat = %s\n",
- amrFrameFormatString(amr.eAMRFrameFormat));
+ asString(amr.eAMRFrameFormat));
}
break;
@@ -4698,12 +4486,7 @@ status_t QueryCodec(
const char *componentName, const char *mime,
bool isEncoder,
CodecCapabilities *caps) {
- if (strncmp(componentName, "OMX.", 4)) {
- // Not an OpenMax component but a software codec.
- caps->mFlags = 0;
- caps->mComponentName = componentName;
- return OK;
- }
+ bool isVideo = !strncasecmp(mime, "video/", 6);
sp<OMXCodecObserver> observer = new OMXCodecObserver;
IOMX::node_id node;
@@ -4718,59 +4501,62 @@ status_t QueryCodec(
caps->mFlags = 0;
caps->mComponentName = componentName;
- OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
- InitOMXParams(&param);
+ // NOTE: OMX does not provide a way to query AAC profile support
+ if (isVideo) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
+ InitOMXParams(&param);
- param.nPortIndex = !isEncoder ? 0 : 1;
+ param.nPortIndex = !isEncoder ? 0 : 1;
- for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
- err = omx->getParameter(
- node, OMX_IndexParamVideoProfileLevelQuerySupported,
- &param, sizeof(param));
-
- if (err != OK) {
- break;
- }
+ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoProfileLevelQuerySupported,
+ &param, sizeof(param));
- CodecProfileLevel profileLevel;
- profileLevel.mProfile = param.eProfile;
- profileLevel.mLevel = param.eLevel;
+ if (err != OK) {
+ break;
+ }
- caps->mProfileLevels.push(profileLevel);
- }
+ CodecProfileLevel profileLevel;
+ profileLevel.mProfile = param.eProfile;
+ profileLevel.mLevel = param.eLevel;
- // Color format query
- // return colors in the order reported by the OMX component
- // prefix "flexible" standard ones with the flexible equivalent
- OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
- InitOMXParams(&portFormat);
- portFormat.nPortIndex = !isEncoder ? 1 : 0;
- for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
- err = omx->getParameter(
- node, OMX_IndexParamVideoPortFormat,
- &portFormat, sizeof(portFormat));
- if (err != OK) {
- break;
+ caps->mProfileLevels.push(profileLevel);
}
- OMX_U32 flexibleEquivalent;
- if (ACodec::isFlexibleColorFormat(
- omx, node, portFormat.eColorFormat, &flexibleEquivalent)) {
- bool marked = false;
- for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
- if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
- marked = true;
- break;
- }
+ // Color format query
+ // return colors in the order reported by the OMX component
+ // prefix "flexible" standard ones with the flexible equivalent
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ InitOMXParams(&portFormat);
+ portFormat.nPortIndex = !isEncoder ? 1 : 0;
+ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoPortFormat,
+ &portFormat, sizeof(portFormat));
+ if (err != OK) {
+ break;
}
- if (!marked) {
- caps->mColorFormats.push(flexibleEquivalent);
+
+ OMX_U32 flexibleEquivalent;
+ if (ACodec::isFlexibleColorFormat(
+ omx, node, portFormat.eColorFormat, &flexibleEquivalent)) {
+ bool marked = false;
+ for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
+ if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
+ marked = true;
+ break;
+ }
+ }
+ if (!marked) {
+ caps->mColorFormats.push(flexibleEquivalent);
+ }
}
+ caps->mColorFormats.push(portFormat.eColorFormat);
}
- caps->mColorFormats.push(portFormat.eColorFormat);
}
- if (!isEncoder && !strncmp(mime, "video/", 6)) {
+ if (isVideo && !isEncoder) {
if (omx->storeMetaDataInBuffers(
node, 1 /* port index */, OMX_TRUE) == OK ||
omx->prepareForAdaptivePlayback(
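The hand-written name tables deleted above are replaced by the asString() overloads provided by OMX_AsString.h. The following is a hedged sketch of that pattern, not the actual header contents; the enum and its values (19 and 21, taken from the positions of the YUV420 entries in the table removed above) are examples only.

#include <cstdio>

// Example enum standing in for an OMX enum type; values mirror the removed table.
enum ExampleColorFormat {
    FormatUnused = 0,
    FormatYUV420Planar = 19,
    FormatYUV420SemiPlanar = 21,
};

// asString()-style helper: map an enum value to its identifier, with a fallback.
static inline const char *asString(ExampleColorFormat f, const char *def = "??") {
    switch (f) {
        case FormatUnused:           return "FormatUnused";
        case FormatYUV420Planar:     return "FormatYUV420Planar";
        case FormatYUV420SemiPlanar: return "FormatYUV420SemiPlanar";
        default:                     return def;  // unknown or vendor-specific value
    }
}

int main() {
    std::printf("%s\n", asString(FormatYUV420Planar));       // FormatYUV420Planar
    std::printf("%s\n", asString((ExampleColorFormat)100));  // ??
    return 0;
}

One practical effect of the switch-based helpers is that a new enum value needs one edit in the shared header instead of keeping several per-file string tables in sync.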
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 821bd81..6219053 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -38,6 +38,7 @@ extern "C" {
int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb);
int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb);
int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb);
+ long vorbis_packet_blocksize(vorbis_info *vi,ogg_packet *op);
}
namespace android {
@@ -84,6 +85,8 @@ struct MyVorbisExtractor {
private:
struct Page {
uint64_t mGranulePosition;
+ int32_t mPrevPacketSize;
+ uint64_t mPrevPacketPos;
uint32_t mSerialNo;
uint32_t mPageNo;
uint8_t mFlags;
@@ -121,6 +124,8 @@ private:
status_t verifyHeader(
MediaBuffer *buffer, uint8_t type);
+ int32_t packetBlockSize(MediaBuffer *buffer);
+
void parseFileMetaData();
status_t findPrevGranulePosition(off64_t pageOffset, uint64_t *granulePos);
@@ -373,6 +378,7 @@ status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
mFirstPacketInPage = true;
mCurrentPageSamples = 0;
mCurrentPage.mNumSegments = 0;
+ mCurrentPage.mPrevPacketSize = -1;
mNextLaceIndex = 0;
// XXX what if new page continues packet from last???
@@ -489,16 +495,6 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
tmp->set_range(0, buffer->range_length());
buffer->release();
} else {
- // XXX Not only is this not technically the correct time for
- // this packet, we also stamp every packet in this page
- // with the same time. This needs fixing later.
-
- if (mVi.rate) {
- // Rate may not have been initialized yet if we're currently
- // reading the configuration packets...
- // Fortunately, the timestamp doesn't matter for those.
- timeUs = mCurrentPage.mGranulePosition * 1000000ll / mVi.rate;
- }
tmp->set_range(0, 0);
}
buffer = tmp;
@@ -521,16 +517,34 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
if (gotFullPacket) {
// We've just read the entire packet.
- if (timeUs >= 0) {
- buffer->meta_data()->setInt64(kKeyTime, timeUs);
- }
-
if (mFirstPacketInPage) {
buffer->meta_data()->setInt32(
kKeyValidSamples, mCurrentPageSamples);
mFirstPacketInPage = false;
}
+ if (mVi.rate) {
+ // Rate may not have been initialized yet if we're currently
+ // reading the configuration packets...
+ // Fortunately, the timestamp doesn't matter for those.
+ int32_t curBlockSize = packetBlockSize(buffer);
+ if (mCurrentPage.mPrevPacketSize < 0) {
+ mCurrentPage.mPrevPacketSize = curBlockSize;
+ mCurrentPage.mPrevPacketPos =
+ mCurrentPage.mGranulePosition - mCurrentPageSamples;
+ timeUs = mCurrentPage.mPrevPacketPos * 1000000ll / mVi.rate;
+ } else {
+ // The effective block size is the average of the two overlapped blocks
+ int32_t actualBlockSize =
+ (curBlockSize + mCurrentPage.mPrevPacketSize) / 2;
+ timeUs = mCurrentPage.mPrevPacketPos * 1000000ll / mVi.rate;
+ // The actual size output by the decoder will be half the effective
+ // size, due to the overlap
+ mCurrentPage.mPrevPacketPos += actualBlockSize / 2;
+ mCurrentPage.mPrevPacketSize = curBlockSize;
+ }
+ buffer->meta_data()->setInt64(kKeyTime, timeUs);
+ }
*out = buffer;
return OK;
@@ -686,6 +700,35 @@ void MyVorbisExtractor::buildTableOfContents() {
}
}
+int32_t MyVorbisExtractor::packetBlockSize(MediaBuffer *buffer) {
+ const uint8_t *data =
+ (const uint8_t *)buffer->data() + buffer->range_offset();
+
+ size_t size = buffer->range_length();
+
+ ogg_buffer buf;
+ buf.data = (uint8_t *)data;
+ buf.size = size;
+ buf.refcount = 1;
+ buf.ptr.owner = NULL;
+
+ ogg_reference ref;
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = size;
+ ref.next = NULL;
+
+ ogg_packet pack;
+ pack.packet = &ref;
+ pack.bytes = ref.length;
+ pack.b_o_s = 0;
+ pack.e_o_s = 0;
+ pack.granulepos = 0;
+ pack.packetno = 0;
+
+ return vorbis_packet_blocksize(&mVi, &pack);
+}
+
status_t MyVorbisExtractor::verifyHeader(
MediaBuffer *buffer, uint8_t type) {
const uint8_t *data =
@@ -730,6 +773,10 @@ status_t MyVorbisExtractor::verifyHeader(
ALOGV("upper-bitrate = %ld", mVi.bitrate_upper);
ALOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
ALOGV("window-bitrate = %ld", mVi.bitrate_window);
+ ALOGV("blocksizes: %d/%d",
+ vorbis_info_blocksize(&mVi, 0),
+ vorbis_info_blocksize(&mVi, 1)
+ );
off64_t size;
if (mSource->getSize(&size) == OK) {
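The per-packet timestamp logic added above follows Vorbis overlap-add: a packet decodes to roughly (previousBlockSize + currentBlockSize) / 4 samples, so the granule position advances by half of the averaged block size. Here is a small standalone sketch of that arithmetic, mirroring the branch structure above; the block sizes and sample rate are example values only (real streams report them via vorbis_info_blocksize()).

#include <cstdint>
#include <cstdio>

int main() {
    const int64_t rate = 44100;                       // example sample rate
    const int32_t blockSizes[] = { 2048, 256, 256, 2048, 2048 };
    int32_t prevSize = -1;                            // mirrors mPrevPacketSize starting at -1
    int64_t posSamples = 0;                           // page-start position, in samples
    for (int32_t cur : blockSizes) {
        int64_t timeUs = posSamples * 1000000ll / rate;  // stamp before advancing, as above
        if (prevSize < 0) {
            prevSize = cur;                           // first packet of the page: no advance
        } else {
            int32_t effective = (cur + prevSize) / 2; // average of the two overlapped blocks
            posSamples += effective / 2;              // decoder outputs half of that
            prevSize = cur;
        }
        std::printf("packet ts = %lld us\n", (long long)timeUs);
    }
    return 0;
}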
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 4e1c65c..530383b 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -448,7 +448,7 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
}
// Part of the BufferQueue::ConsumerListener
-void SurfaceMediaSource::onFrameAvailable() {
+void SurfaceMediaSource::onFrameAvailable(const BufferItem& /* item */) {
ALOGV("onFrameAvailable");
sp<FrameAvailableListener> listener;
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 40925fd..351ba1e 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -929,33 +929,22 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
}
if (mEndOfInput) {
- if (outputDelayRingBufferSamplesAvailable() > 0
- && outputDelayRingBufferSamplesAvailable()
- < mStreamInfo->frameSize * mStreamInfo->numChannels) {
- ALOGE("not a complete frame of samples available");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
-
- if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ int ringBufAvail = outputDelayRingBufferSamplesAvailable();
+ if (!outQueue.empty()
+ && ringBufAvail < mStreamInfo->frameSize * mStreamInfo->numChannels) {
if (!mEndOfOutput) {
- // send empty block signaling EOS
+ // send partial or empty block signaling EOS
mEndOfOutput = true;
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- if (outHeader->nOffset != 0) {
- ALOGE("outHeader->nOffset != 0 is not handled");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
-
INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(outHeader->pBuffer
+ outHeader->nOffset);
- int32_t ns = 0;
- outHeader->nFilledLen = 0;
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer, ringBufAvail);
+ if (ns < 0) {
+ ns = 0;
+ }
+ outHeader->nFilledLen = ns;
outHeader->nFlags = OMX_BUFFERFLAG_EOS;
outHeader->nTimeStamp = mBufferTimestamps.itemAt(0);
@@ -994,7 +983,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
}
int32_t ns = outputDelayRingBufferGetSamples(0, avail);
if (ns != avail) {
- ALOGE("not a complete frame of samples available");
+ ALOGW("not a complete frame of samples available");
break;
}
mOutputBufferCount++;
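The SoftAAC2 change above stops treating a short tail as an error: at end of input, whatever samples remain in the output delay ring buffer are written to the last output buffer, even if that is less than a full frame, and the buffer is tagged with OMX_BUFFERFLAG_EOS. A toy standalone sketch of that drain step follows; it is not the SoftAAC2 code, and the types and names are illustrative.

#include <cstdio>
#include <vector>

struct OutBuffer {
    std::vector<short> pcm;
    bool eos = false;
};

// At end of input, flush the remaining delayed samples - possibly a partial
// frame - and mark the buffer as the last one instead of reporting an error.
static OutBuffer drainAtEndOfInput(std::vector<short> &delayBuf) {
    OutBuffer out;
    out.pcm.assign(delayBuf.begin(), delayBuf.end());
    delayBuf.clear();
    out.eos = true;
    return out;
}

int main() {
    std::vector<short> delay(123, 0);  // fewer samples than a full AAC frame
    OutBuffer last = drainAtEndOfInput(delay);
    std::printf("flushed %zu samples, eos=%d\n", last.pcm.size(), (int)last.eos);
    return 0;
}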
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 1513b0b..cfc37b7 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -575,9 +575,13 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
&editPortInfo(0)->mDef;
portDef->format.video.nFrameWidth = mVideoWidth;
portDef->format.video.nFrameHeight = mVideoHeight;
+ portDef->format.video.nStride = portDef->format.video.nFrameWidth;
+ portDef->format.video.nSliceHeight = portDef->format.video.nFrameHeight;
portDef->format.video.xFramerate = def->format.video.xFramerate;
portDef->format.video.eColorFormat =
(OMX_COLOR_FORMATTYPE) mVideoColorFormat;
+ portDef->nBufferSize =
+ (portDef->format.video.nStride * portDef->format.video.nSliceHeight * 3) / 2;
portDef = &editPortInfo(1)->mDef;
portDef->format.video.nFrameWidth = mVideoWidth;
portDef->format.video.nFrameHeight = mVideoHeight;
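The nStride/nSliceHeight and nBufferSize updates here (and the matching changes to the MPEG4 and VP8 encoders further down) size the input port for planar YUV 4:2:0: a full-resolution luma plane plus two quarter-resolution chroma planes, i.e. stride * sliceHeight * 3 / 2 bytes. A one-function sketch of that computation with example dimensions:

#include <cstdint>
#include <cstdio>

// YUV 4:2:0 frame size: Y plane (stride * sliceHeight) plus U and V planes at a
// quarter of that each, giving 3/2 of the luma size in total.
static uint32_t yuv420BufferSize(uint32_t stride, uint32_t sliceHeight) {
    return (stride * sliceHeight * 3) / 2;
}

int main() {
    std::printf("%u bytes\n", yuv420BufferSize(1280, 720));  // 1382400
    return 0;
}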
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index 1f4b6fd..e399984 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -382,5 +382,6 @@ android::SoftOMXComponent *createSoftOMXComponent(
} else {
CHECK(!"Unknown component");
}
+ return NULL;
}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 28edff8..1d0a2f0 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -462,9 +462,13 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
&editPortInfo(0)->mDef;
portDef->format.video.nFrameWidth = mVideoWidth;
portDef->format.video.nFrameHeight = mVideoHeight;
+ portDef->format.video.nStride = portDef->format.video.nFrameWidth;
+ portDef->format.video.nSliceHeight = portDef->format.video.nFrameHeight;
portDef->format.video.xFramerate = def->format.video.xFramerate;
portDef->format.video.eColorFormat =
(OMX_COLOR_FORMATTYPE) mVideoColorFormat;
+ portDef->nBufferSize =
+ (portDef->format.video.nStride * portDef->format.video.nSliceHeight * 3) / 2;
portDef = &editPortInfo(1)->mDef;
portDef->format.video.nFrameWidth = mVideoWidth;
portDef->format.video.nFrameHeight = mVideoHeight;
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 828577a..87d6961 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -189,4 +189,5 @@ android::SoftOMXComponent *createSoftOMXComponent(
} else {
CHECK(!"Unknown component");
}
+ return NULL;
}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index eb621d5..0285feb 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -805,8 +805,12 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
def->format.video.xFramerate = mFramerate;
def->format.video.eColorFormat = mColorFormat;
+ def->nBufferSize =
+ (def->format.video.nStride * def->format.video.nSliceHeight * 3) / 2;
def = &editPortInfo(kOutputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
def->format.video.nFrameHeight = mHeight;
diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/libstagefright/foundation/ADebug.cpp
new file mode 100644
index 0000000..ec4a960
--- /dev/null
+++ b/media/libstagefright/foundation/ADebug.cpp
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdlib.h>
+#include <ctype.h>
+
+#define LOG_TAG "ADebug"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <cutils/properties.h>
+
+#include <ADebug.h>
+#include <AStringUtils.h>
+#include <AUtils.h>
+
+namespace android {
+
+//static
+ADebug::Level ADebug::GetDebugLevelFromString(
+ const char *name, const char *value, ADebug::Level def) {
+ // split on ,
+ const char *next = value, *current;
+ const unsigned long maxLevel = (unsigned long)kDebugMax;
+ while (next != NULL) {
+ current = next;
+ next = strchr(current, ',');
+ if (next != NULL) {
+ ++next; // pass ,
+ }
+
+ while (isspace(*current)) {
+ ++current;
+ }
+ // check for :
+ char *colon = strchr(current, ':');
+
+ // get level
+ char *end;
+ errno = 0; // strtoul does not clear errno, but it can be set for any return value
+ unsigned long level = strtoul(current, &end, 10);
+ while (isspace(*end)) {
+ ++end;
+ }
+ if (errno != 0 || end == current || (end != colon && *end != '\0' && end != next)) {
+ // invalid level - skip
+ continue;
+ }
+ if (colon != NULL) {
+ // check if pattern matches
+ do { // skip colon and spaces
+ ++colon;
+ } while (isspace(*colon));
+ size_t globLen = (next == NULL ? strlen(colon) : (next - 1 - colon));
+ while (globLen > 0 && isspace(colon[globLen - 1])) {
+ --globLen; // trim glob
+ }
+
+ if (!AStringUtils::MatchesGlob(
+ colon, globLen, name, strlen(name), true /* ignoreCase */)) {
+ continue;
+ }
+ }
+
+ // update debug level
+ def = (Level)min(level, maxLevel);
+ }
+ return def;
+}
+
+//static
+ADebug::Level ADebug::GetDebugLevelFromProperty(
+ const char *name, const char *propertyName, ADebug::Level def) {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get(propertyName, value, NULL)) {
+ return GetDebugLevelFromString(name, value, def);
+ }
+ return def;
+}
+
+//static
+char *ADebug::GetDebugName(const char *name) {
+ char *debugName = strdup(name);
+ const char *terms[] = { "omx", "video", "audio" };
+ for (size_t i = 0; i < NELEM(terms) && debugName != NULL; i++) {
+ const char *term = terms[i];
+ const size_t len = strlen(term);
+ char *match = strcasestr(debugName, term);
+ if (match != NULL && (match == debugName || match[-1] == '.'
+ || match[len] == '.' || match[len] == '\0')) {
+ char *src = match + len;
+ if (match == debugName || match[-1] == '.') {
+ src += (*src == '.'); // remove trailing or double .
+ }
+ memmove(match, src, debugName + strlen(debugName) - src + 1);
+ }
+ }
+
+ return debugName;
+}
+
+} // namespace android
+
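GetDebugLevelFromString() above accepts a comma-separated list of entries, each either a bare level or level:glob, and later matching entries override earlier ones for the component name being looked up. The following is a hedged usage sketch, assuming the foundation headers are on the include path and the program links against libstagefright_foundation; the property value and component names are examples only.

#include <cstdio>
#include <media/stagefright/foundation/ADebug.h>

using namespace android;

int main() {
    // Level 1 for everything, bumped to 4 for components whose name matches *aac*.
    const char *value = "1, 4:*aac*";
    ADebug::Level a = ADebug::GetDebugLevelFromString(
            "omx.google.aac.decoder", value, ADebug::kDebugLifeCycle);
    ADebug::Level b = ADebug::GetDebugLevelFromString(
            "omx.google.h264.decoder", value, ADebug::kDebugLifeCycle);
    std::printf("aac=%d h264=%d\n", (int)a, (int)b);  // expected: aac=4 h264=1
    return 0;
}

In practice the value comes from a system property (debug.stagefright.omx-debug in the OMXNodeInstance change below), so per-component verbosity can be changed at runtime with setprop.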
diff --git a/media/libstagefright/foundation/AStringUtils.cpp b/media/libstagefright/foundation/AStringUtils.cpp
new file mode 100644
index 0000000..e5a846c
--- /dev/null
+++ b/media/libstagefright/foundation/AStringUtils.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <AStringUtils.h>
+
+namespace android {
+
+// static
+int AStringUtils::Compare(const char *a, const char *b, size_t len, bool ignoreCase) {
+ // this method relies on a trailing '\0' if a or b are shorter than len
+ return ignoreCase ? strncasecmp(a, b, len) : strncmp(a, b, len);
+}
+
+// static
+bool AStringUtils::MatchesGlob(
+ const char *glob, size_t globLen, const char *str, size_t strLen, bool ignoreCase) {
+ // this method does not assume a trailing '\0'
+ size_t ix = 0, globIx = 0;
+
+ // pattern must match until first '*'
+ while (globIx < globLen && glob[globIx] != '*') {
+ ++globIx;
+ }
+ if (strLen < globIx || Compare(str, glob, globIx /* len */, ignoreCase)) {
+ return false;
+ }
+ ix = globIx;
+
+ // process by * separated sections
+ while (globIx < globLen) {
+ ++globIx;
+ size_t start = globIx;
+ while (globIx < globLen && glob[globIx] != '*') {
+ ++globIx;
+ }
+ size_t len = globIx - start;
+ const char *pattern = glob + start;
+
+ if (globIx == globLen) {
+ // last pattern must match tail
+ if (ix + len > strLen) {
+ return false;
+ }
+ const char *tail = str + strLen - len;
+ return !Compare(tail, pattern, len, ignoreCase);
+ }
+ // progress after first occurrence of pattern
+ while (ix + len <= strLen && Compare(str + ix, pattern, len, ignoreCase)) {
+ ++ix;
+ }
+ if (ix + len > strLen) {
+ return false;
+ }
+ ix += len;
+ // we will loop around as globIx < globLen
+ }
+
+ // we only get here if there were no * in the pattern
+ return ix == strLen;
+}
+
+} // namespace android
+
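MatchesGlob() above supports '*' wildcards over length-delimited strings (no trailing NUL is assumed), optionally ignoring case. A small usage sketch, again assuming the foundation headers are on the include path; the component names are examples only.

#include <cstdio>
#include <cstring>
#include <media/stagefright/foundation/AStringUtils.h>

using namespace android;

static bool match(const char *glob, const char *str) {
    return AStringUtils::MatchesGlob(glob, strlen(glob), str, strlen(str), true /* ignoreCase */);
}

int main() {
    std::printf("%d\n", (int)match("omx.google.*", "OMX.google.aac.decoder"));  // 1
    std::printf("%d\n", (int)match("*aac*", "OMX.google.aac.decoder"));         // 1
    std::printf("%d\n", (int)match("*.h264.*", "OMX.google.aac.decoder"));      // 0
    return 0;
}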
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index 90a6a23..c1dd6ce 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -5,6 +5,7 @@ LOCAL_SRC_FILES:= \
AAtomizer.cpp \
ABitReader.cpp \
ABuffer.cpp \
+ ADebug.cpp \
AHandler.cpp \
AHierarchicalStateMachine.cpp \
ALooper.cpp \
@@ -12,6 +13,7 @@ LOCAL_SRC_FILES:= \
AMessage.cpp \
ANetworkSession.cpp \
AString.cpp \
+ AStringUtils.cpp \
ParsedMessage.cpp \
base64.cpp \
hexdump.cpp
@@ -22,6 +24,7 @@ LOCAL_C_INCLUDES:= \
LOCAL_SHARED_LIBRARIES := \
libbinder \
libutils \
+ libcutils \
liblog
LOCAL_CFLAGS += -Wno-multichar -Werror
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index fba6b09..874c118 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1458,7 +1458,7 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
extra->setInt64("timeUs", timeUs);
discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
discontinuityQueue->queueDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, extra, true);
+ ATSParser::DISCONTINUITY_TIME, extra, true);
} else {
int32_t type;
int64_t srcSegmentStartTimeUs;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 30fa868..e247550 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -700,8 +700,7 @@ status_t PlaylistFetcher::refreshPlaylist() {
mRefreshState = (RefreshState)(mRefreshState + 1);
}
} else {
- ALOGE("failed to load playlist at url '%s'", mURI.c_str());
- notifyError(ERROR_IO);
+ ALOGE("failed to load playlist at url '%s'", uriDebugString(mURI).c_str());
return ERROR_IO;
}
} else {
@@ -724,26 +723,25 @@ bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) {
}
void PlaylistFetcher::onDownloadNext() {
- if (refreshPlaylist() != OK) {
- return;
- }
-
- int32_t firstSeqNumberInPlaylist;
- if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
- "media-sequence", &firstSeqNumberInPlaylist)) {
- firstSeqNumberInPlaylist = 0;
- }
-
+ status_t err = refreshPlaylist();
+ int32_t firstSeqNumberInPlaylist = 0;
+ int32_t lastSeqNumberInPlaylist = 0;
bool discontinuity = false;
- const int32_t lastSeqNumberInPlaylist =
- firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+ if (mPlaylist != NULL) {
+ if (mPlaylist->meta() != NULL) {
+ mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist);
+ }
+
+ lastSeqNumberInPlaylist =
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
- if (mDiscontinuitySeq < 0) {
- mDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
+ if (mDiscontinuitySeq < 0) {
+ mDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
+ }
}
- if (mSeqNumber < 0) {
+ if (mPlaylist != NULL && mSeqNumber < 0) {
CHECK_GE(mStartTimeUs, 0ll);
if (mSegmentStartTimeUs < 0) {
@@ -785,19 +783,26 @@ void PlaylistFetcher::onDownloadNext() {
}
}
+ // if mPlaylist is NULL then err must be non-OK; but the other way around might not be true
if (mSeqNumber < firstSeqNumberInPlaylist
- || mSeqNumber > lastSeqNumberInPlaylist) {
- if (!mPlaylist->isComplete() && mNumRetries < kMaxNumRetries) {
+ || mSeqNumber > lastSeqNumberInPlaylist
+ || err != OK) {
+ if ((err != OK || !mPlaylist->isComplete()) && mNumRetries < kMaxNumRetries) {
++mNumRetries;
- if (mSeqNumber > lastSeqNumberInPlaylist) {
+ if (mSeqNumber > lastSeqNumberInPlaylist || err != OK) {
+ // make sure we reach this retry logic on refresh failures
+ // by adding an err != OK clause to all enclosing if's.
+
// refresh in increasing fraction (1/2, 1/3, ...) of the
// playlist's target duration or 3 seconds, whichever is less
- int32_t targetDurationSecs;
- CHECK(mPlaylist->meta()->findInt32(
- "target-duration", &targetDurationSecs));
- int64_t delayUs = mPlaylist->size() * targetDurationSecs *
- 1000000ll / (1 + mNumRetries);
+ int64_t delayUs = kMaxMonitorDelayUs;
+ if (mPlaylist != NULL && mPlaylist->meta() != NULL) {
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ delayUs = mPlaylist->size() * targetDurationSecs *
+ 1000000ll / (1 + mNumRetries);
+ }
if (delayUs > kMaxMonitorDelayUs) {
delayUs = kMaxMonitorDelayUs;
}
@@ -809,7 +814,12 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- // we've missed the boat, let's start from the lowest sequence
+ if (err != OK) {
+ notifyError(err);
+ return;
+ }
+
+ // we've missed the boat, let's start 3 segments prior to the latest sequence
// number available and signal a discontinuity.
ALOGI("We've missed the boat, restarting playback."
@@ -963,8 +973,8 @@ void PlaylistFetcher::onDownloadNext() {
} while (bytesRead != 0);
if (bufferStartsWithTsSyncByte(buffer)) {
- // If we still don't see a stream after fetching a full ts segment mark it as
- // nonexistent.
+ // If we don't see a stream in the program table after fetching a full ts segment
+ // mark it as nonexistent.
const size_t kNumTypes = ATSParser::NUM_SOURCE_TYPES;
ATSParser::SourceType srcTypes[kNumTypes] =
{ ATSParser::VIDEO, ATSParser::AUDIO };
@@ -979,7 +989,7 @@ void PlaylistFetcher::onDownloadNext() {
static_cast<AnotherPacketSource *>(
mTSParser->getSource(srcType).get());
- if (source == NULL) {
+ if (!mTSParser->hasSource(srcType)) {
ALOGW("MPEG2 Transport stream does not contain %s data.",
srcType == ATSParser::VIDEO ? "video" : "audio");
@@ -996,7 +1006,7 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- status_t err = OK;
+ err = OK;
if (tsBuffer != NULL) {
AString method;
CHECK(buffer->meta()->findString("cipher-method", &method));
@@ -1155,7 +1165,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0);
mTSParser->signalDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, extra);
+ ATSParser::DISCONTINUITY_TIME, extra);
mAbsoluteTimeAnchorUs = mNextPTSTimeUs;
mNextPTSTimeUs = -1ll;
@@ -1587,6 +1597,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO);
mStartTimeUsNotify->post();
mStartTimeUsNotify.clear();
+ mStartup = false;
}
}
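The retry path above now waits a decreasing share of the playlist duration between attempts - playlist size times target duration divided by (1 + retries) - capped by kMaxMonitorDelayUs (3 seconds, per the comment above), and falls back to the cap when no playlist is available at all. A worked sketch of that backoff with assumed example numbers:

#include <algorithm>
#include <cstdint>
#include <cstdio>

static const int64_t kMaxMonitorDelayUs = 3000000ll;  // 3 s, per the comment above

// Delay before the next playlist refresh after a failed or out-of-range fetch.
static int64_t retryDelayUs(size_t playlistSize, int32_t targetDurationSecs, int32_t numRetries) {
    int64_t delayUs = (int64_t)playlistSize * targetDurationSecs * 1000000ll / (1 + numRetries);
    return std::min(delayUs, kMaxMonitorDelayUs);
}

int main() {
    // Example playlist: 5 segments of 4 seconds each.
    for (int32_t retries = 1; retries <= 8; ++retries) {
        std::printf("retry %d -> %lld us\n", retries,
                    (long long)retryDelayUs(5, 4, retries));
    }
    return 0;
}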
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 24d431c..104dcfc 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -31,7 +31,7 @@ struct GraphicBufferSource;
struct OMXNodeInstance {
OMXNodeInstance(
- OMX *owner, const sp<IOMXObserver> &observer);
+ OMX *owner, const sp<IOMXObserver> &observer, const char *name);
void setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle);
@@ -149,6 +149,18 @@ private:
KeyedVector<OMX_BUFFERHEADERTYPE *, OMX::buffer_id> mBufferHeaderToBufferID;
#endif
+ // For debug support
+ char *mName;
+ int DEBUG;
+ size_t mNumPortBuffers[2]; // modified under mLock, read outside for debug
+ Mutex mDebugLock;
+ // following are modified and read under mDebugLock
+ int DEBUG_BUMP;
+ SortedVector<OMX_BUFFERHEADERTYPE *> mInputBuffersWithCodec, mOutputBuffersWithCodec;
+ size_t mDebugLevelBumpPendingBuffers[2];
+ void bumpDebugLevel_l(size_t numInputBuffers, size_t numOutputBuffers);
+ void unbumpDebugLevel_l(size_t portIndex);
+
~OMXNodeInstance();
void addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id);
@@ -186,6 +198,10 @@ private:
OMX_U32 portIndex, OMX_BOOL enable,
OMX_BOOL useGraphicBuffer, OMX_BOOL *usingGraphicBufferInMeta);
+ status_t emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header,
+ OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr);
+
sp<GraphicBufferSource> getGraphicBufferSource();
void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource);
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 2587ec7..4f0862c 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -413,16 +413,16 @@ void BlockIterator::seek(
const mkvparser::CuePoint* pCP;
mkvparser::Tracks const *pTracks = pSegment->GetTracks();
- unsigned long int trackCount = pTracks->GetTracksCount();
while (!pCues->DoneParsing()) {
pCues->LoadCuePoint();
pCP = pCues->GetLast();
CHECK(pCP);
+ size_t trackCount = mExtractor->mTracks.size();
for (size_t index = 0; index < trackCount; ++index) {
- const mkvparser::Track *pTrack = pTracks->GetTrackByIndex(index);
+ MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(index);
+ const mkvparser::Track *pTrack = pTracks->GetTrackByNumber(track.mTrackNum);
if (pTrack && pTrack->GetType() == 1 && pCP->Find(pTrack)) { // VIDEO_TRACK
- MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(index);
track.mCuePoints.push_back(pCP);
}
}
@@ -434,12 +434,13 @@ void BlockIterator::seek(
}
const mkvparser::CuePoint::TrackPosition *pTP = NULL;
- const mkvparser::Track *thisTrack = pTracks->GetTrackByIndex(mIndex);
+ const mkvparser::Track *thisTrack = pTracks->GetTrackByNumber(mTrackNum);
if (thisTrack->GetType() == 1) { // video
MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(mIndex);
pTP = track.find(seekTimeNs);
} else {
// The Cue index is built around video keyframes
+ unsigned long int trackCount = pTracks->GetTracksCount();
for (size_t index = 0; index < trackCount; ++index) {
const mkvparser::Track *pTrack = pTracks->GetTrackByIndex(index);
if (pTrack && pTrack->GetType() == 1 && pCues->Find(seekTimeNs, pTrack, pCP, pTP)) {
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 6d8866a..482ccff 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -63,6 +63,7 @@ struct ATSParser::Program : public RefBase {
void signalEOS(status_t finalResult);
sp<MediaSource> getSource(SourceType type);
+ bool hasSource(SourceType type) const;
int64_t convertPTSToTimestamp(uint64_t PTS);
@@ -119,6 +120,9 @@ struct ATSParser::Stream : public RefBase {
sp<MediaSource> getSource(SourceType type);
+ bool isAudio() const;
+ bool isVideo() const;
+
protected:
virtual ~Stream();
@@ -146,9 +150,6 @@ private:
void extractAACFrames(const sp<ABuffer> &buffer);
- bool isAudio() const;
- bool isVideo() const;
-
DISALLOW_EVIL_CONSTRUCTORS(Stream);
};
@@ -244,11 +245,16 @@ struct StreamInfo {
status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
- CHECK_EQ(table_id, 0x02u);
-
+ if (table_id != 0x02u) {
+ ALOGE("PMT data error!");
+ return ERROR_MALFORMED;
+ }
unsigned section_syntax_indicator = br->getBits(1);
ALOGV(" section_syntax_indicator = %u", section_syntax_indicator);
- CHECK_EQ(section_syntax_indicator, 1u);
+ if (section_syntax_indicator != 1u) {
+ ALOGE("PMT data error!");
+ return ERROR_MALFORMED;
+ }
CHECK_EQ(br->getBits(1), 0u);
MY_LOGV(" reserved = %u", br->getBits(2));
@@ -435,6 +441,19 @@ sp<MediaSource> ATSParser::Program::getSource(SourceType type) {
return NULL;
}
+bool ATSParser::Program::hasSource(SourceType type) const {
+ for (size_t i = 0; i < mStreams.size(); ++i) {
+ const sp<Stream> &stream = mStreams.valueAt(i);
+ if (type == AUDIO && stream->isAudio()) {
+ return true;
+ } else if (type == VIDEO && stream->isVideo()) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) {
if (!(mParser->mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)) {
if (!mFirstPTSValid) {
@@ -660,7 +679,7 @@ void ATSParser::Stream::signalDiscontinuity(
int64_t resumeAtMediaTimeUs =
mProgram->convertPTSToTimestamp(resumeAtPTS);
- extra->setInt64("resume-at-mediatimeUs", resumeAtMediaTimeUs);
+ extra->setInt64("resume-at-mediaTimeUs", resumeAtMediaTimeUs);
}
}
@@ -739,8 +758,10 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
CHECK_GE(optional_bytes_remaining, 5u);
- CHECK_EQ(br->getBits(4), PTS_DTS_flags);
-
+ if (br->getBits(4) != PTS_DTS_flags) {
+ ALOGE("PES data Error!");
+ return ERROR_MALFORMED;
+ }
PTS = ((uint64_t)br->getBits(3)) << 30;
CHECK_EQ(br->getBits(1), 1u);
PTS |= ((uint64_t)br->getBits(15)) << 15;
@@ -1003,8 +1024,10 @@ void ATSParser::signalEOS(status_t finalResult) {
void ATSParser::parseProgramAssociationTable(ABitReader *br) {
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
- CHECK_EQ(table_id, 0x00u);
-
+ if (table_id != 0x00u) {
+ ALOGE("PAT data error!");
+ return;
+ }
unsigned section_syntax_indictor = br->getBits(1);
ALOGV(" section_syntax_indictor = %u", section_syntax_indictor);
CHECK_EQ(section_syntax_indictor, 1u);
@@ -1074,7 +1097,9 @@ status_t ATSParser::parsePID(
sp<PSISection> section = mPSISections.valueAt(sectionIndex);
if (payload_unit_start_indicator) {
- CHECK(section->isEmpty());
+ if (!section->isEmpty()) {
+ return ERROR_UNSUPPORTED;
+ }
unsigned skip = br->getBits(8);
br->skipBits(skip * 8);
@@ -1203,7 +1228,10 @@ status_t ATSParser::parseTS(ABitReader *br) {
ALOGV("---");
unsigned sync_byte = br->getBits(8);
- CHECK_EQ(sync_byte, 0x47u);
+ if (sync_byte != 0x47u) {
+ ALOGE("[error] parseTS: return error as sync_byte=0x%x", sync_byte);
+ return BAD_VALUE;
+ }
if (br->getBits(1)) { // transport_error_indicator
// silently ignore.
@@ -1264,6 +1292,17 @@ sp<MediaSource> ATSParser::getSource(SourceType type) {
return NULL;
}
+bool ATSParser::hasSource(SourceType type) const {
+ for (size_t i = 0; i < mPrograms.size(); ++i) {
+ const sp<Program> &program = mPrograms.itemAt(i);
+ if (program->hasSource(type)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
bool ATSParser::PTSTimeDeltaEstablished() {
if (mPrograms.isEmpty()) {
return false;
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index b5cc6ed..75d76dc 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -41,8 +41,6 @@ struct ATSParser : public RefBase {
DISCONTINUITY_ABSOLUTE_TIME = 8,
DISCONTINUITY_TIME_OFFSET = 16,
- DISCONTINUITY_SEEK = DISCONTINUITY_TIME,
-
// For legacy reasons this also implies a time discontinuity.
DISCONTINUITY_FORMATCHANGE =
DISCONTINUITY_AUDIO_FORMAT
@@ -76,6 +74,7 @@ struct ATSParser : public RefBase {
NUM_SOURCE_TYPES = 2
};
sp<MediaSource> getSource(SourceType type);
+ bool hasSource(SourceType type) const;
bool PTSTimeDeltaEstablished();
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index a03f6f9..c579d4c 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -265,8 +265,12 @@ void AnotherPacketSource::queueDiscontinuity(
mEOSResult = OK;
mLastQueuedTimeUs = 0;
mLatestEnqueuedMeta = NULL;
- ++mQueuedDiscontinuityCount;
+ if (type == ATSParser::DISCONTINUITY_NONE) {
+ return;
+ }
+
+ ++mQueuedDiscontinuityCount;
sp<ABuffer> buffer = new ABuffer(0);
buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
buffer->meta()->setMessage("extra", extra);
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 3e70956..44c7edc 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -750,7 +750,7 @@ int GraphicBufferSource::findMatchingCodecBuffer_l(
}
// BufferQueue::ConsumerListener callback
-void GraphicBufferSource::onFrameAvailable() {
+void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock autoLock(mMutex);
ALOGV("onFrameAvailable exec=%d avail=%zu",
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index c0860ab..c8e3775 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -137,7 +137,7 @@ protected:
// into the codec buffer, and call Empty[This]Buffer. If we're not yet
// executing or there's no codec buffer available, we just increment
// mNumFramesAvailable and return.
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// BufferQueue::ConsumerListener interface, called when the client has
// released one or more GraphicBuffers. We clear out the appropriate
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 43f0bc9..f8d38ff 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -225,7 +225,7 @@ status_t OMX::allocateNode(
*node = 0;
- OMXNodeInstance *instance = new OMXNodeInstance(this, observer);
+ OMXNodeInstance *instance = new OMXNodeInstance(this, observer, name);
OMX_COMPONENTTYPE *handle;
OMX_ERRORTYPE err = mMaster->makeComponentInstance(
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index d07ec14..c04d95f 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -18,11 +18,15 @@
#define LOG_TAG "OMXNodeInstance"
#include <utils/Log.h>
+#include <inttypes.h>
+
#include "../include/OMXNodeInstance.h"
#include "OMXMaster.h"
#include "GraphicBufferSource.h"
#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include <OMX_AsString.h>
#include <binder/IMemory.h>
#include <gui/BufferQueue.h>
@@ -30,7 +34,68 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+
static const OMX_U32 kPortIndexInput = 0;
+static const OMX_U32 kPortIndexOutput = 1;
+
+#define CLOGW(fmt, ...) ALOGW("[%x:%s] " fmt, mNodeID, mName, ##__VA_ARGS__)
+
+#define CLOG_ERROR_IF(cond, fn, err, fmt, ...) \
+ ALOGE_IF(cond, #fn "(%x:%s, " fmt ") ERROR: %s(%#x)", \
+ mNodeID, mName, ##__VA_ARGS__, asString(err), err)
+#define CLOG_ERROR(fn, err, fmt, ...) CLOG_ERROR_IF(true, fn, err, fmt, ##__VA_ARGS__)
+#define CLOG_IF_ERROR(fn, err, fmt, ...) \
+ CLOG_ERROR_IF((err) != OMX_ErrorNone, fn, err, fmt, ##__VA_ARGS__)
+
+#define CLOGI_(level, fn, fmt, ...) \
+ ALOGI_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+#define CLOGD_(level, fn, fmt, ...) \
+ ALOGD_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+
+#define CLOG_LIFE(fn, fmt, ...) CLOGI_(ADebug::kDebugLifeCycle, fn, fmt, ##__VA_ARGS__)
+#define CLOG_STATE(fn, fmt, ...) CLOGI_(ADebug::kDebugState, fn, fmt, ##__VA_ARGS__)
+#define CLOG_CONFIG(fn, fmt, ...) CLOGI_(ADebug::kDebugConfig, fn, fmt, ##__VA_ARGS__)
+#define CLOG_INTERNAL(fn, fmt, ...) CLOGD_(ADebug::kDebugInternalState, fn, fmt, ##__VA_ARGS__)
+
+#define CLOG_DEBUG_IF(cond, fn, fmt, ...) \
+ ALOGD_IF(cond, #fn "(%x, " fmt ")", mNodeID, ##__VA_ARGS__)
+
+#define CLOG_BUFFER(fn, fmt, ...) \
+ CLOG_DEBUG_IF(DEBUG >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
+#define CLOG_BUMPED_BUFFER(fn, fmt, ...) \
+ CLOG_DEBUG_IF(DEBUG_BUMP >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
+
+/* buffer formatting */
+#define BUFFER_FMT(port, fmt, ...) "%s:%u " fmt, portString(port), (port), ##__VA_ARGS__
+#define NEW_BUFFER_FMT(buffer_id, port, fmt, ...) \
+ BUFFER_FMT(port, fmt ") (#%zu => %#x", ##__VA_ARGS__, mActiveBuffers.size(), (buffer_id))
+
+#define SIMPLE_BUFFER(port, size, data) BUFFER_FMT(port, "%zu@%p", (size), (data))
+#define SIMPLE_NEW_BUFFER(buffer_id, port, size, data) \
+ NEW_BUFFER_FMT(buffer_id, port, "%zu@%p", (size), (data))
+
+#define EMPTY_BUFFER(addr, header) "%#x [%u@%p]", \
+ (addr), (header)->nAllocLen, (header)->pBuffer
+#define FULL_BUFFER(addr, header) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld]", \
+ (intptr_t)(addr), (header)->nAllocLen, (header)->pBuffer, \
+ (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp
+
+#define WITH_STATS_WRAPPER(fmt, ...) fmt " { IN=%zu/%zu OUT=%zu/%zu }", ##__VA_ARGS__, \
+ mInputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexInput], \
+ mOutputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexOutput]
+// TRICKY: this is needed so formatting macros expand before substitution
+#define WITH_STATS(fmt, ...) WITH_STATS_WRAPPER(fmt, ##__VA_ARGS__)
+
+template<class T>
+static void InitOMXParams(T *params) {
+ memset(params, 0, sizeof(T));
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
namespace android {
@@ -56,8 +121,8 @@ struct BufferMeta {
}
memcpy((OMX_U8 *)mMem->pointer() + header->nOffset,
- header->pBuffer + header->nOffset,
- header->nFilledLen);
+ header->pBuffer + header->nOffset,
+ header->nFilledLen);
}
void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) {
@@ -66,8 +131,8 @@ struct BufferMeta {
}
memcpy(header->pBuffer + header->nOffset,
- (const OMX_U8 *)mMem->pointer() + header->nOffset,
- header->nFilledLen);
+ (const OMX_U8 *)mMem->pointer() + header->nOffset,
+ header->nFilledLen);
}
void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {
@@ -89,8 +154,17 @@ OMX_CALLBACKTYPE OMXNodeInstance::kCallbacks = {
&OnEvent, &OnEmptyBufferDone, &OnFillBufferDone
};
+static inline const char *portString(OMX_U32 portIndex) {
+ switch (portIndex) {
+ case kPortIndexInput: return "Input";
+ case kPortIndexOutput: return "Output";
+ case ~0: return "All";
+ default: return "port";
+ }
+}
+
OMXNodeInstance::OMXNodeInstance(
- OMX *owner, const sp<IOMXObserver> &observer)
+ OMX *owner, const sp<IOMXObserver> &observer, const char *name)
: mOwner(owner),
mNodeID(0),
mHandle(NULL),
@@ -100,15 +174,25 @@ OMXNodeInstance::OMXNodeInstance(
, mBufferIDCount(0)
#endif
{
+ mName = ADebug::GetDebugName(name);
+ DEBUG = ADebug::GetDebugLevelFromProperty(name, "debug.stagefright.omx-debug");
+ ALOGV("debug level for %s is %d", name, DEBUG);
+ DEBUG_BUMP = DEBUG;
+ mNumPortBuffers[0] = 0;
+ mNumPortBuffers[1] = 0;
+ mDebugLevelBumpPendingBuffers[0] = 0;
+ mDebugLevelBumpPendingBuffers[1] = 0;
}
OMXNodeInstance::~OMXNodeInstance() {
+ free(mName);
CHECK(mHandle == NULL);
}
void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) {
- CHECK(mHandle == NULL);
mNodeID = node_id;
+ CLOG_LIFE(allocateNode, "handle=%p", handle);
+ CHECK(mHandle == NULL);
mHandle = handle;
}
@@ -120,6 +204,7 @@ sp<GraphicBufferSource> OMXNodeInstance::getGraphicBufferSource() {
void OMXNodeInstance::setGraphicBufferSource(
const sp<GraphicBufferSource>& bufferSource) {
Mutex::Autolock autoLock(mGraphicBufferSourceLock);
+ CLOG_INTERNAL(setGraphicBufferSource, "%p", bufferSource.get());
mGraphicBufferSource = bufferSource;
}
@@ -140,6 +225,7 @@ static status_t StatusFromOMXError(OMX_ERRORTYPE err) {
case OMX_ErrorNone:
return OK;
case OMX_ErrorUnsupportedSetting:
+ case OMX_ErrorUnsupportedIndex:
return ERROR_UNSUPPORTED;
default:
return UNKNOWN_ERROR;
@@ -147,8 +233,14 @@ static status_t StatusFromOMXError(OMX_ERRORTYPE err) {
}
status_t OMXNodeInstance::freeNode(OMXMaster *master) {
+ CLOG_LIFE(freeNode, "handle=%p", mHandle);
static int32_t kMaxNumIterations = 10;
+ // exit if we have already freed the node
+ if (mHandle == NULL) {
+ return OK;
+ }
+
// Transition the node from its current state all the way down
// to "Loaded".
// This ensures that all active buffers are properly freed even
@@ -170,10 +262,11 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
OMX_ERRORTYPE err;
int32_t iteration = 0;
while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
- && state != OMX_StateIdle
- && state != OMX_StateInvalid) {
+ && state != OMX_StateIdle
+ && state != OMX_StateInvalid) {
if (++iteration > kMaxNumIterations) {
- ALOGE("component failed to enter Idle state, aborting.");
+ CLOGW("failed to enter Idle state (now %s(%d), aborting.",
+ asString(state), state);
state = OMX_StateInvalid;
break;
}
@@ -199,10 +292,11 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
OMX_ERRORTYPE err;
int32_t iteration = 0;
while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
- && state != OMX_StateLoaded
- && state != OMX_StateInvalid) {
+ && state != OMX_StateLoaded
+ && state != OMX_StateInvalid) {
if (++iteration > kMaxNumIterations) {
- ALOGE("component failed to enter Loaded state, aborting.");
+ CLOGW("failed to enter Loaded state (now %s(%d), aborting.",
+ asString(state), state);
state = OMX_StateInvalid;
break;
}
@@ -220,20 +314,18 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
break;
default:
- CHECK(!"should not be here, unknown state.");
+ LOG_ALWAYS_FATAL("unknown state %s(%#x).", asString(state), state);
break;
}
- ALOGV("calling destroyComponentInstance");
+ ALOGV("[%x:%s] calling destroyComponentInstance", mNodeID, mName);
OMX_ERRORTYPE err = master->destroyComponentInstance(
static_cast<OMX_COMPONENTTYPE *>(mHandle));
- ALOGV("destroyComponentInstance returned err %d", err);
mHandle = NULL;
-
- if (err != OMX_ErrorNone) {
- ALOGE("FreeHandle FAILED with error 0x%08x.", err);
- }
+ CLOG_IF_ERROR(freeNode, err, "");
+ free(mName);
+ mName = NULL;
mOwner->invalidateNodeID(mNodeID);
mNodeID = 0;
@@ -265,7 +357,17 @@ status_t OMXNodeInstance::sendCommand(
Mutex::Autolock autoLock(mLock);
+ // bump internal-state debug level for 2 input and output frames past a command
+ {
+ Mutex::Autolock _l(mDebugLock);
+ bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+ }
+
+ const char *paramString =
+ cmd == OMX_CommandStateSet ? asString((OMX_STATETYPE)param) : portString(param);
+ CLOG_STATE(sendCommand, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL);
+ CLOG_IF_ERROR(sendCommand, err, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
return StatusFromOMXError(err);
}
@@ -274,17 +376,23 @@ status_t OMXNodeInstance::getParameter(
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params);
- ALOGE_IF(err != OMX_ErrorNone, "getParameter(%d) ERROR: %#x", index, err);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ // some errors are expected for getParameter
+ if (err != OMX_ErrorNoMore) {
+ CLOG_IF_ERROR(getParameter, err, "%s(%#x)", asString(extIndex), index);
+ }
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::setParameter(
- OMX_INDEXTYPE index, const void *params, size_t /* size */) {
+ OMX_INDEXTYPE index, const void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ CLOG_CONFIG(setParameter, "%s(%#x), %zu@%p)", asString(extIndex), index, size, params);
OMX_ERRORTYPE err = OMX_SetParameter(
mHandle, index, const_cast<void *>(params));
- ALOGE_IF(err != OMX_ErrorNone, "setParameter(%d) ERROR: %#x", index, err);
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x)", asString(extIndex), index);
return StatusFromOMXError(err);
}
@@ -293,16 +401,23 @@ status_t OMXNodeInstance::getConfig(
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetConfig(mHandle, index, params);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ // some errors are expected for getConfig
+ if (err != OMX_ErrorNoMore) {
+ CLOG_IF_ERROR(getConfig, err, "%s(%#x)", asString(extIndex), index);
+ }
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::setConfig(
- OMX_INDEXTYPE index, const void *params, size_t /* size */) {
+ OMX_INDEXTYPE index, const void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ CLOG_CONFIG(setConfig, "%s(%#x), %zu@%p)", asString(extIndex), index, size, params);
OMX_ERRORTYPE err = OMX_SetConfig(
mHandle, index, const_cast<void *>(params));
-
+ CLOG_IF_ERROR(setConfig, err, "%s(%#x)", asString(extIndex), index);
return StatusFromOMXError(err);
}
@@ -310,13 +425,14 @@ status_t OMXNodeInstance::getState(OMX_STATETYPE* state) {
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetState(mHandle, state);
-
+ CLOG_IF_ERROR(getState, err, "");
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::enableGraphicBuffers(
OMX_U32 portIndex, OMX_BOOL enable) {
Mutex::Autolock autoLock(mLock);
+ CLOG_CONFIG(enableGraphicBuffers, "%s:%u, %d", portString(portIndex), portIndex, enable);
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.enableAndroidNativeBuffers");
@@ -324,32 +440,19 @@ status_t OMXNodeInstance::enableGraphicBuffers(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- if (enable) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
- }
-
+ CLOG_ERROR_IF(enable, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
- OMX_VERSIONTYPE ver;
- ver.s.nVersionMajor = 1;
- ver.s.nVersionMinor = 0;
- ver.s.nRevision = 0;
- ver.s.nStep = 0;
- EnableAndroidNativeBuffersParams params = {
- sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
- };
+ EnableAndroidNativeBuffersParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+ params.enable = enable;
err = OMX_SetParameter(mHandle, index, &params);
-
- if (err != OMX_ErrorNone) {
- ALOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
- err, err);
-
- return UNKNOWN_ERROR;
- }
-
- return OK;
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d", name, index,
+ portString(portIndex), portIndex, enable);
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::getGraphicBufferUsage(
@@ -362,26 +465,19 @@ status_t OMXNodeInstance::getGraphicBufferUsage(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
-
+ CLOG_ERROR(getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
- OMX_VERSIONTYPE ver;
- ver.s.nVersionMajor = 1;
- ver.s.nVersionMinor = 0;
- ver.s.nRevision = 0;
- ver.s.nStep = 0;
- GetAndroidNativeBufferUsageParams params = {
- sizeof(GetAndroidNativeBufferUsageParams), ver, portIndex, 0,
- };
+ GetAndroidNativeBufferUsageParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
err = OMX_GetParameter(mHandle, index, &params);
-
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetAndroidNativeBufferUsage failed with error %d (0x%08x)",
- err, err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u", name, index,
+ portString(portIndex), portIndex);
+ return StatusFromOMXError(err);
}
*usage = params.nUsage;
@@ -393,6 +489,7 @@ status_t OMXNodeInstance::storeMetaDataInBuffers(
OMX_U32 portIndex,
OMX_BOOL enable) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u en:%d", portString(portIndex), portIndex, enable);
return storeMetaDataInBuffers_l(
portIndex, enable,
OMX_FALSE /* useGraphicBuffer */, NULL /* usingGraphicBufferInMetadata */);
@@ -419,37 +516,42 @@ status_t OMXNodeInstance::storeMetaDataInBuffers_l(
: OMX_ErrorBadParameter;
if (err == OMX_ErrorNone) {
*usingGraphicBufferInMetadata = OMX_TRUE;
+ name = graphicBufferName;
} else {
- *usingGraphicBufferInMetadata = OMX_FALSE;
err = OMX_GetExtensionIndex(mHandle, name, &index);
}
- if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
- return StatusFromOMXError(err);
- }
-
- StoreMetaDataInBuffersParams params;
- memset(&params, 0, sizeof(params));
- params.nSize = sizeof(params);
+ OMX_ERRORTYPE xerr = err;
+ if (err == OMX_ErrorNone) {
+ StoreMetaDataInBuffersParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
- // Version: 1.0.0.0
- params.nVersion.s.nVersionMajor = 1;
+ err = OMX_SetParameter(mHandle, index, &params);
+ }
- params.nPortIndex = portIndex;
- params.bStoreMetaData = enable;
- if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
- ALOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+    // don't log a loud error if the component does not support metadata mode on the output port
+ if (err != OMX_ErrorNone) {
*usingGraphicBufferInMetadata = OMX_FALSE;
- return UNKNOWN_ERROR;
+ if (err == OMX_ErrorUnsupportedIndex && portIndex == kPortIndexOutput) {
+ CLOGW("component does not support metadata mode; using fallback");
+ } else if (xerr != OMX_ErrorNone) {
+ CLOG_ERROR(getExtensionIndex, xerr, "%s", name);
+ } else {
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d GB=%d", name, index,
+ portString(portIndex), portIndex, enable, useGraphicBuffer);
+ }
}
- return err;
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::prepareForAdaptivePlayback(
OMX_U32 portIndex, OMX_BOOL enable, OMX_U32 maxFrameWidth,
OMX_U32 maxFrameHeight) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(prepareForAdaptivePlayback, "%s:%u en=%d max=%ux%u",
+ portString(portIndex), portIndex, enable, maxFrameWidth, maxFrameHeight);
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
@@ -457,33 +559,29 @@ status_t OMXNodeInstance::prepareForAdaptivePlayback(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGW_IF(enable, "OMX_GetExtensionIndex %s failed", name);
+ CLOG_ERROR_IF(enable, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
PrepareForAdaptivePlaybackParams params;
- params.nSize = sizeof(params);
- params.nVersion.s.nVersionMajor = 1;
- params.nVersion.s.nVersionMinor = 0;
- params.nVersion.s.nRevision = 0;
- params.nVersion.s.nStep = 0;
-
+ InitOMXParams(&params);
params.nPortIndex = portIndex;
params.bEnable = enable;
params.nMaxFrameWidth = maxFrameWidth;
params.nMaxFrameHeight = maxFrameHeight;
- if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
- ALOGW("OMX_SetParameter failed for PrepareForAdaptivePlayback "
- "with error %d (0x%08x)", err, err);
- return UNKNOWN_ERROR;
- }
- return err;
+
+ err = OMX_SetParameter(mHandle, index, &params);
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d max=%ux%u", name, index,
+ portString(portIndex), portIndex, enable, maxFrameWidth, maxFrameHeight);
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::configureVideoTunnelMode(
OMX_U32 portIndex, OMX_BOOL tunneled, OMX_U32 audioHwSync,
native_handle_t **sidebandHandle) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(configureVideoTunnelMode, "%s:%u tun=%d sync=%u",
+ portString(portIndex), portIndex, tunneled, audioHwSync);
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
@@ -491,36 +589,33 @@ status_t OMXNodeInstance::configureVideoTunnelMode(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("configureVideoTunnelMode extension is missing!");
+ CLOG_ERROR_IF(tunneled, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
ConfigureVideoTunnelModeParams tunnelParams;
- tunnelParams.nSize = sizeof(tunnelParams);
- tunnelParams.nVersion.s.nVersionMajor = 1;
- tunnelParams.nVersion.s.nVersionMinor = 0;
- tunnelParams.nVersion.s.nRevision = 0;
- tunnelParams.nVersion.s.nStep = 0;
-
+ InitOMXParams(&tunnelParams);
tunnelParams.nPortIndex = portIndex;
tunnelParams.bTunneled = tunneled;
tunnelParams.nAudioHwSync = audioHwSync;
err = OMX_SetParameter(mHandle, index, &tunnelParams);
if (err != OMX_ErrorNone) {
- ALOGE("configureVideoTunnelMode failed! (err %d).", err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u tun=%d sync=%u", name, index,
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+ return StatusFromOMXError(err);
}
err = OMX_GetParameter(mHandle, index, &tunnelParams);
if (err != OMX_ErrorNone) {
- ALOGE("GetVideoTunnelWindow failed! (err %d).", err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u tun=%d sync=%u", name, index,
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+ return StatusFromOMXError(err);
}
if (sidebandHandle) {
*sidebandHandle = (native_handle_t*)tunnelParams.pSidebandWindow;
}
- return err;
+ return OK;
}
status_t OMXNodeInstance::useBuffer(
@@ -537,14 +632,14 @@ status_t OMXNodeInstance::useBuffer(
params->size(), static_cast<OMX_U8 *>(params->pointer()));
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+ CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
@@ -558,6 +653,8 @@ status_t OMXNodeInstance::useBuffer(
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT(
+ *buffer, portIndex, "%zu@%p", params->size(), params->pointer()));
return OK;
}
@@ -567,17 +664,14 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
// port definition
OMX_PARAM_PORTDEFINITIONTYPE def;
- def.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
- def.nVersion.s.nVersionMajor = 1;
- def.nVersion.s.nVersionMinor = 0;
- def.nVersion.s.nRevision = 0;
- def.nVersion.s.nStep = 0;
+ InitOMXParams(&def);
def.nPortIndex = portIndex;
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, OMX_IndexParamPortDefinition, &def);
- if (err != OMX_ErrorNone)
- {
- ALOGE("%s::%d:Error getting OMX_IndexParamPortDefinition", __FUNCTION__, __LINE__);
- return err;
+ if (err != OMX_ErrorNone) {
+ OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u",
+ asString(index), index, portString(portIndex), portIndex);
+ return UNKNOWN_ERROR;
}
BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
@@ -595,11 +689,11 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
bufferHandle);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+ CLOG_ERROR(useBuffer, err, BUFFER_FMT(portIndex, "%u@%p", def.nBufferSize, bufferHandle));
delete bufferMeta;
bufferMeta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pBuffer, bufferHandle);
@@ -608,7 +702,8 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
*buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
-
+ CLOG_BUFFER(useGraphicBuffer2, NEW_BUFFER_FMT(
+ *buffer, portIndex, "%u@%p", def.nBufferSize, bufferHandle));
return OK;
}
@@ -632,10 +727,8 @@ status_t OMXNodeInstance::useGraphicBuffer(
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.useAndroidNativeBuffer");
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
-
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
-
+ CLOG_ERROR(getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
@@ -656,15 +749,15 @@ status_t OMXNodeInstance::useGraphicBuffer(
err = OMX_SetParameter(mHandle, index, &params);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
- err);
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u meta=%p GB=%p", name, index,
+ portString(portIndex), portIndex, bufferMeta, graphicBuffer->handle);
delete bufferMeta;
bufferMeta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, bufferMeta);
@@ -672,12 +765,13 @@ status_t OMXNodeInstance::useGraphicBuffer(
*buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
-
+ CLOG_BUFFER(useGraphicBuffer, NEW_BUFFER_FMT(
+ *buffer, portIndex, "GB=%p", graphicBuffer->handle));
return OK;
}
status_t OMXNodeInstance::updateGraphicBufferInMeta(
- OMX_U32 /* portIndex */, const sp<GraphicBuffer>& graphicBuffer,
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
@@ -688,7 +782,8 @@ status_t OMXNodeInstance::updateGraphicBufferInMeta(
bufferMeta->setGraphicBuffer(graphicBuffer);
metadata->eType = kMetadataBufferTypeGrallocSource;
metadata->pHandle = graphicBuffer->handle;
-
+ CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x := %p",
+ portString(portIndex), portIndex, buffer, graphicBuffer->handle);
return OK;
}
@@ -714,19 +809,21 @@ status_t OMXNodeInstance::createInputSurface(
// Retrieve the width and height of the graphic buffer, set when the
// codec was configured.
OMX_PARAM_PORTDEFINITIONTYPE def;
- def.nSize = sizeof(def);
- def.nVersion.s.nVersionMajor = 1;
- def.nVersion.s.nVersionMinor = 0;
- def.nVersion.s.nRevision = 0;
- def.nVersion.s.nStep = 0;
+ InitOMXParams(&def);
def.nPortIndex = portIndex;
OMX_ERRORTYPE oerr = OMX_GetParameter(
mHandle, OMX_IndexParamPortDefinition, &def);
- CHECK(oerr == OMX_ErrorNone);
+ if (oerr != OMX_ErrorNone) {
+ OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
+ CLOG_ERROR(getParameter, oerr, "%s(%#x): %s:%u",
+ asString(index), index, portString(portIndex), portIndex);
+ return UNKNOWN_ERROR;
+ }
if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) {
- ALOGE("createInputSurface requires COLOR_FormatSurface "
- "(AndroidOpaque) color format");
+ CLOGW("createInputSurface requires COLOR_FormatSurface "
+ "(AndroidOpaque) color format instead of %s(%#x)",
+ asString(def.format.video.eColorFormat), def.format.video.eColorFormat);
return INVALID_OPERATION;
}
@@ -749,9 +846,9 @@ status_t OMXNodeInstance::signalEndOfInputStream() {
// flag set). Seems easier than doing the equivalent from here.
sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
if (bufferSource == NULL) {
- ALOGW("signalEndOfInputStream can only be used with Surface input");
+ CLOGW("signalEndOfInputStream can only be used with Surface input");
return INVALID_OPERATION;
- };
+ }
return bufferSource->signalEndOfInputStream();
}
@@ -768,14 +865,13 @@ status_t OMXNodeInstance::allocateBuffer(
mHandle, &header, portIndex, buffer_meta, size);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err);
-
+ CLOG_ERROR(allocateBuffer, err, BUFFER_FMT(portIndex, "%zu@", size));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
@@ -789,6 +885,7 @@ status_t OMXNodeInstance::allocateBuffer(
if (bufferSource != NULL && portIndex == kPortIndexInput) {
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(allocateBuffer, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p", size, *buffer_data));
return OK;
}
@@ -806,14 +903,14 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
mHandle, &header, portIndex, buffer_meta, params->size());
if (err != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err);
-
+ CLOG_ERROR(allocateBufferWithBackup, err,
+ SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
@@ -827,12 +924,16 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %p",
+ params->size(), params->pointer(), header->pBuffer));
+
return OK;
}
status_t OMXNodeInstance::freeBuffer(
OMX_U32 portIndex, OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
+ CLOG_BUFFER(freeBuffer, "%s:%u %#x", portString(portIndex), portIndex, buffer);
removeActiveBuffer(portIndex, buffer);
@@ -840,6 +941,7 @@ status_t OMXNodeInstance::freeBuffer(
BufferMeta *buffer_meta = static_cast<BufferMeta *>(header->pAppPrivate);
OMX_ERRORTYPE err = OMX_FreeBuffer(mHandle, portIndex, header);
+ CLOG_IF_ERROR(freeBuffer, err, "%s:%u %#x", portString(portIndex), portIndex, buffer);
delete buffer_meta;
buffer_meta = NULL;
@@ -856,8 +958,18 @@ status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) {
header->nOffset = 0;
header->nFlags = 0;
- OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.add(header);
+ CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header)));
+ }
+ OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header));
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.remove(header);
+ }
return StatusFromOMXError(err);
}
@@ -870,14 +982,66 @@ status_t OMXNodeInstance::emptyBuffer(
OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
header->nFilledLen = rangeLength;
header->nOffset = rangeOffset;
- header->nFlags = flags;
- header->nTimeStamp = timestamp;
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(header->pAppPrivate);
buffer_meta->CopyToOMX(header);
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer);
+}
+
+// log queued buffer activity for the next few input and/or output frames
+// if logging at internal state level
+void OMXNodeInstance::bumpDebugLevel_l(size_t numInputBuffers, size_t numOutputBuffers) {
+ if (DEBUG == ADebug::kDebugInternalState) {
+ DEBUG_BUMP = ADebug::kDebugAll;
+ if (numInputBuffers > 0) {
+ mDebugLevelBumpPendingBuffers[kPortIndexInput] = numInputBuffers;
+ }
+ if (numOutputBuffers > 0) {
+ mDebugLevelBumpPendingBuffers[kPortIndexOutput] = numOutputBuffers;
+ }
+ }
+}
+
+void OMXNodeInstance::unbumpDebugLevel_l(size_t portIndex) {
+ if (mDebugLevelBumpPendingBuffers[portIndex]) {
+ --mDebugLevelBumpPendingBuffers[portIndex];
+ }
+ if (!mDebugLevelBumpPendingBuffers[0]
+ && !mDebugLevelBumpPendingBuffers[1]) {
+ DEBUG_BUMP = DEBUG;
+ }
+}
+
+status_t OMXNodeInstance::emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr) {
+ header->nFlags = flags;
+ header->nTimeStamp = timestamp;
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mInputBuffersWithCodec.add(header);
+
+ // bump internal-state debug level for 2 input frames past a buffer with CSD
+ if ((flags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+ bumpDebugLevel_l(2 /* numInputBuffers */, 0 /* numOutputBuffers */);
+ }
+
+ CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header)));
+ }
+
OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
+ CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header));
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ if (err != OMX_ErrorNone) {
+ mInputBuffersWithCodec.remove(header);
+ } else if (!(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ unbumpDebugLevel_l(kPortIndexInput);
+ }
+ }
return StatusFromOMXError(err);
}
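To make the bump/unbump pairing concrete: bumpDebugLevel_l() raises the effective level to kDebugAll when a codec-config (CSD) input buffer is queued, and each subsequent plain input submission (or output FBD) decrements the per-port countdown until the level falls back. A small standalone sketch of that behaviour, using hypothetical stand-ins for the ADebug levels (not part of the patch):

    #include <cstddef>
    #include <cstdio>

    enum Level { kDebugInternalState = 3, kDebugAll = 5 };  // stand-ins for ADebug levels
    static Level DEBUG = kDebugInternalState;                // configured level
    static Level DEBUG_BUMP = DEBUG;                         // effective level the CLOG_* macros read
    static size_t pending[2] = {0, 0};                       // per-port countdown: [0]=input, [1]=output

    static void bumpDebugLevel(size_t numInput, size_t numOutput) {
        if (DEBUG == kDebugInternalState) {
            DEBUG_BUMP = kDebugAll;
            if (numInput)  pending[0] = numInput;
            if (numOutput) pending[1] = numOutput;
        }
    }

    static void unbumpDebugLevel(size_t port) {
        if (pending[port]) --pending[port];
        if (!pending[0] && !pending[1]) DEBUG_BUMP = DEBUG;  // both countdowns drained
    }

    int main() {
        bumpDebugLevel(2, 0);        // CSD buffer queued: log the next two input frames loudly
        unbumpDebugLevel(0);         // first plain input buffer submitted
        printf("%d\n", DEBUG_BUMP);  // 5 (still bumped)
        unbumpDebugLevel(0);         // second plain input buffer submitted
        printf("%d\n", DEBUG_BUMP);  // 3 (back to the configured level)
    }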
@@ -891,15 +1055,8 @@ status_t OMXNodeInstance::emptyDirectBuffer(
header->nFilledLen = rangeLength;
header->nOffset = rangeOffset;
- header->nFlags = flags;
- header->nTimeStamp = timestamp;
-
- OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
- if (err != OMX_ErrorNone) {
- ALOGW("emptyDirectBuffer failed, OMX err=0x%x", err);
- }
- return StatusFromOMXError(err);
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer);
}
status_t OMXNodeInstance::getExtensionIndex(
@@ -912,11 +1069,25 @@ status_t OMXNodeInstance::getExtensionIndex(
return StatusFromOMXError(err);
}
+inline static const char *asString(IOMX::InternalOptionType i, const char *def = "??") {
+ switch (i) {
+ case IOMX::INTERNAL_OPTION_SUSPEND: return "SUSPEND";
+ case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
+ return "REPEAT_PREVIOUS_FRAME_DELAY";
+ case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP";
+ case IOMX::INTERNAL_OPTION_START_TIME: return "START_TIME";
+ case IOMX::INTERNAL_OPTION_TIME_LAPSE: return "TIME_LAPSE";
+ default: return def;
+ }
+}
+
status_t OMXNodeInstance::setInternalOption(
OMX_U32 portIndex,
IOMX::InternalOptionType type,
const void *data,
size_t size) {
+ CLOG_CONFIG(setInternalOption, "%s(%d): %s:%u %zu@%p",
+ asString(type), type, portString(portIndex), portIndex, size, data);
switch (type) {
case IOMX::INTERNAL_OPTION_SUSPEND:
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
@@ -928,6 +1099,7 @@ status_t OMXNodeInstance::setInternalOption(
getGraphicBufferSource();
if (bufferSource == NULL || portIndex != kPortIndexInput) {
+ CLOGW("setInternalOption is only for Surface input");
return ERROR_UNSUPPORTED;
}
@@ -937,6 +1109,7 @@ status_t OMXNodeInstance::setInternalOption(
}
bool suspend = *(bool *)data;
+ CLOG_CONFIG(setInternalOption, "suspend=%d", suspend);
bufferSource->suspend(suspend);
} else if (type ==
IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY){
@@ -945,7 +1118,7 @@ status_t OMXNodeInstance::setInternalOption(
}
int64_t delayUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "delayUs=%lld", (long long)delayUs);
return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
} else if (type ==
IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP){
@@ -954,7 +1127,7 @@ status_t OMXNodeInstance::setInternalOption(
}
int64_t maxGapUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);
return bufferSource->setMaxTimestampGapUs(maxGapUs);
} else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
if (size != sizeof(int64_t)) {
@@ -962,13 +1135,18 @@ status_t OMXNodeInstance::setInternalOption(
}
int64_t skipFramesBeforeUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "beforeUs=%lld", (long long)skipFramesBeforeUs);
bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
} else { // IOMX::INTERNAL_OPTION_TIME_LAPSE
if (size != sizeof(int64_t) * 2) {
return INVALID_OPERATION;
}
+ int64_t timePerFrameUs = ((int64_t *)data)[0];
+ int64_t timePerCaptureUs = ((int64_t *)data)[1];
+ CLOG_CONFIG(setInternalOption, "perFrameUs=%lld perCaptureUs=%lld",
+ (long long)timePerFrameUs, (long long)timePerCaptureUs);
+
bufferSource->setTimeLapseUs((int64_t *)data);
}
@@ -987,6 +1165,16 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {
OMX_BUFFERHEADERTYPE *buffer =
findBufferHeader(msg.u.extended_buffer_data.buffer);
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.remove(buffer);
+
+ CLOG_BUMPED_BUFFER(
+ FBD, WITH_STATS(FULL_BUFFER(msg.u.extended_buffer_data.buffer, buffer)));
+
+ unbumpDebugLevel_l(kPortIndexOutput);
+ }
+
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(buffer->pAppPrivate);
@@ -1002,16 +1190,23 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {
return;
}
} else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
+ OMX_BUFFERHEADERTYPE *buffer =
+ findBufferHeader(msg.u.buffer_data.buffer);
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mInputBuffersWithCodec.remove(buffer);
+
+ CLOG_BUMPED_BUFFER(
+ EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer)));
+ }
+
if (bufferSource != NULL) {
// This is one of the buffers used exclusively by
// GraphicBufferSource.
// Don't dispatch a message back to ACodec, since it doesn't
// know that anyone asked to have the buffer emptied and will
// be very confused.
-
- OMX_BUFFERHEADERTYPE *buffer =
- findBufferHeader(msg.u.buffer_data.buffer);
-
bufferSource->codecBufferEmptied(buffer);
return;
}
@@ -1035,6 +1230,43 @@ void OMXNodeInstance::onGetHandleFailed() {
// Don't try to acquire mLock here -- in rare circumstances this will hang.
void OMXNodeInstance::onEvent(
OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) {
+ const char *arg1String = "??";
+ const char *arg2String = "??";
+ ADebug::Level level = ADebug::kDebugInternalState;
+
+ switch (event) {
+ case OMX_EventCmdComplete:
+ arg1String = asString((OMX_COMMANDTYPE)arg1);
+ switch (arg1) {
+ case OMX_CommandStateSet:
+ arg2String = asString((OMX_STATETYPE)arg2);
+ level = ADebug::kDebugState;
+ break;
+ case OMX_CommandFlush:
+ case OMX_CommandPortEnable:
+ {
+ // bump internal-state debug level for 2 input and output frames
+ Mutex::Autolock _l(mDebugLock);
+ bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+ }
+ // fall through
+ default:
+ arg2String = portString(arg2);
+ }
+ break;
+ case OMX_EventError:
+ arg1String = asString((OMX_ERRORTYPE)arg1);
+ level = ADebug::kDebugLifeCycle;
+ break;
+ case OMX_EventPortSettingsChanged:
+ arg2String = asString((OMX_INDEXEXTTYPE)arg2);
+ // fall through
+ default:
+ arg1String = portString(arg1);
+ }
+
+ CLOGI_(level, onEvent, "%s(%x), %s(%x), %s(%x)",
+ asString(event), event, arg1String, arg1, arg2String, arg2);
const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
if (bufferSource != NULL
@@ -1092,23 +1324,27 @@ void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) {
active.mPortIndex = portIndex;
active.mID = id;
mActiveBuffers.push(active);
+
+ if (portIndex < NELEM(mNumPortBuffers)) {
+ ++mNumPortBuffers[portIndex];
+ }
}
void OMXNodeInstance::removeActiveBuffer(
OMX_U32 portIndex, OMX::buffer_id id) {
- bool found = false;
for (size_t i = 0; i < mActiveBuffers.size(); ++i) {
if (mActiveBuffers[i].mPortIndex == portIndex
- && mActiveBuffers[i].mID == id) {
- found = true;
+ && mActiveBuffers[i].mID == id) {
mActiveBuffers.removeItemsAt(i);
- break;
+
+ if (portIndex < NELEM(mNumPortBuffers)) {
+ --mNumPortBuffers[portIndex];
+ }
+ return;
}
}
- if (!found) {
- ALOGW("Attempt to remove an active buffer we know nothing about...");
- }
+ CLOGW("Attempt to remove an active buffer [%#x] we know nothing about...", id);
}
void OMXNodeInstance::freeActiveBuffers() {
@@ -1129,7 +1365,7 @@ OMX::buffer_id OMXNodeInstance::makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader)
OMX::buffer_id buffer;
do { // handle the very unlikely case of ID overflow
if (++mBufferIDCount == 0) {
- ++mBufferIDCount;
+ ++mBufferIDCount;
}
buffer = (OMX::buffer_id)mBufferIDCount;
} while (mBufferIDToBufferHeader.indexOfKey(buffer) >= 0);
diff --git a/media/libstagefright/omx/SoftOMXComponent.cpp b/media/libstagefright/omx/SoftOMXComponent.cpp
index 646cd32..df978f8 100644
--- a/media/libstagefright/omx/SoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftOMXComponent.cpp
@@ -283,7 +283,7 @@ OMX_ERRORTYPE SoftOMXComponent::setConfig(
OMX_ERRORTYPE SoftOMXComponent::getExtensionIndex(
const char * /* name */, OMX_INDEXTYPE * /* index */) {
- return OMX_ErrorUndefined;
+ return OMX_ErrorUnsupportedIndex;
}
OMX_ERRORTYPE SoftOMXComponent::useBuffer(
diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp
index f2825dd..43e0269 100644
--- a/media/libstagefright/tests/Utils_test.cpp
+++ b/media/libstagefright/tests/Utils_test.cpp
@@ -24,6 +24,7 @@
#include <unistd.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AStringUtils.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/Utils.h>
@@ -32,6 +33,100 @@ namespace android {
class UtilsTest : public ::testing::Test {
};
+TEST_F(UtilsTest, TestStringUtils) {
+ ASSERT_EQ(AStringUtils::Compare("Audio", "AudioExt", 5, false), 0);
+ ASSERT_EQ(AStringUtils::Compare("Audio", "audiOExt", 5, true), 0);
+ ASSERT_NE(AStringUtils::Compare("Audio", "audioExt", 5, false), 0);
+ ASSERT_NE(AStringUtils::Compare("Audio", "AudiOExt", 5, false), 0);
+
+ ASSERT_LT(AStringUtils::Compare("Audio", "AudioExt", 7, false), 0);
+ ASSERT_LT(AStringUtils::Compare("Audio", "audiOExt", 7, true), 0);
+
+ ASSERT_GT(AStringUtils::Compare("AudioExt", "Audio", 7, false), 0);
+ ASSERT_GT(AStringUtils::Compare("audiOext", "Audio", 7, true), 0);
+
+ ASSERT_LT(AStringUtils::Compare("Audio", "Video", 5, false), 0);
+ ASSERT_LT(AStringUtils::Compare("Audio1", "Audio2", 6, false), 0);
+ ASSERT_LT(AStringUtils::Compare("audio", "VIDEO", 5, true), 0);
+ ASSERT_LT(AStringUtils::Compare("audio1", "AUDIO2", 6, true), 0);
+
+ ASSERT_GT(AStringUtils::Compare("Video", "Audio", 5, false), 0);
+ ASSERT_GT(AStringUtils::Compare("Audio2", "Audio1", 6, false), 0);
+ ASSERT_GT(AStringUtils::Compare("VIDEO", "audio", 5, true), 0);
+ ASSERT_GT(AStringUtils::Compare("AUDIO2", "audio1", 6, true), 0);
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("AudioA", 5, "AudioB", 5, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 6, "AudioA", 5, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "AudioA", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "audiOB", 5, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("AudioA", 5, "audiOB", 5, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 6, "AudioA", 5, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "AudioA", 6, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 6, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 0, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 0, false));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*ring1", 5, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*ring2", 5, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring4", 5, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring5", 5, "StrinG8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring8", 5, "String8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring8", 5, "String8", 7, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*1", 4, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*2", 4, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*3", 4, "string8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*4", 4, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*5", 4, "AString8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*6", 4, "AString8", 7, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ng1", 6, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng2", 6, "string8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng3", 6, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng4", 6, "StriNg8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng5", 6, "StrinG8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ng6", 6, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng8", 6, "AString8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng1", 6, "String16", 7, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ing9", 7, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ringA", 8, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng8", 6, "AString8", 7, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng1", 6, "String16", 7, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ing9", 7, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ringA", 8, "String8", 6, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "bestrestroom", 9, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "bestrestrestroom", 13, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*str*stro", 8, "bestrestrestroom", 14, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str*1", 9, "bestrestrestroom", 14, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "beSTReSTRoom", 9, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "beSTRestreSTRoom", 13, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*str*stro", 8, "bestreSTReSTRoom", 14, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str*1", 9, "bestreSTReSTRoom", 14, true));
+}
+
+TEST_F(UtilsTest, TestDebug) {
+#define LVL(x) (ADebug::Level)(x)
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "", LVL(5)), LVL(5));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", " \t \n ", LVL(2)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "video", "\t\n 3 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo,2:vid*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "avideo", "\t\n 3 \t\n:\t\n avideo \t\n,\t\n 2 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "audio.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "video.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4));
+#undef LVL
+}
+
TEST_F(UtilsTest, TestFourCC) {
ASSERT_EQ(FOURCC('s', 't', 'm' , 'u'), 'stmu');
}
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index e6e19e3..052b700 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -51,104 +51,178 @@ void MtpDataPacket::setTransactionID(MtpTransactionID id) {
MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
}
-uint16_t MtpDataPacket::getUInt16() {
+bool MtpDataPacket::getUInt8(uint8_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
+ value = mBuffer[mOffset++];
+ return true;
+}
+
+bool MtpDataPacket::getUInt16(uint16_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
- mOffset += 2;
- return result;
+ value = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+ mOffset += sizeof(value);
+ return true;
}
-uint32_t MtpDataPacket::getUInt32() {
+bool MtpDataPacket::getUInt32(uint32_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+ value = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24);
- mOffset += 4;
- return result;
+ mOffset += sizeof(value);
+ return true;
}
-uint64_t MtpDataPacket::getUInt64() {
+bool MtpDataPacket::getUInt64(uint64_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+ value = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56);
- mOffset += 8;
- return result;
+ mOffset += sizeof(value);
+ return true;
}
-void MtpDataPacket::getUInt128(uint128_t& value) {
- value[0] = getUInt32();
- value[1] = getUInt32();
- value[2] = getUInt32();
- value[3] = getUInt32();
+bool MtpDataPacket::getUInt128(uint128_t& value) {
+ return getUInt32(value[0]) && getUInt32(value[1]) && getUInt32(value[2]) && getUInt32(value[3]);
}
-void MtpDataPacket::getString(MtpStringBuffer& string)
+bool MtpDataPacket::getString(MtpStringBuffer& string)
{
- string.readFromPacket(this);
+ return string.readFromPacket(this);
}
Int8List* MtpDataPacket::getAInt8() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int8List* result = new Int8List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt8());
+ for (uint32_t i = 0; i < count; i++) {
+ int8_t value;
+ if (!getInt8(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt8List* MtpDataPacket::getAUInt8() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt8List* result = new UInt8List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt8());
+ for (uint32_t i = 0; i < count; i++) {
+ uint8_t value;
+ if (!getUInt8(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int16List* MtpDataPacket::getAInt16() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int16List* result = new Int16List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt16());
+ for (uint32_t i = 0; i < count; i++) {
+ int16_t value;
+ if (!getInt16(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt16List* MtpDataPacket::getAUInt16() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt16List* result = new UInt16List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt16());
+ for (uint32_t i = 0; i < count; i++) {
+ uint16_t value;
+ if (!getUInt16(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int32List* MtpDataPacket::getAInt32() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int32List* result = new Int32List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt32());
+ for (uint32_t i = 0; i < count; i++) {
+ int32_t value;
+ if (!getInt32(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt32List* MtpDataPacket::getAUInt32() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt32List* result = new UInt32List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt32());
+ for (uint32_t i = 0; i < count; i++) {
+ uint32_t value;
+ if (!getUInt32(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int64List* MtpDataPacket::getAInt64() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int64List* result = new Int64List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt64());
+ for (uint32_t i = 0; i < count; i++) {
+ int64_t value;
+ if (!getInt64(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt64List* MtpDataPacket::getAUInt64() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt64List* result = new UInt64List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt64());
+ for (uint32_t i = 0; i < count; i++) {
+ uint64_t value;
+ if (!getUInt64(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
index 2b81063..13d3bd9 100644
--- a/media/mtp/MtpDataPacket.h
+++ b/media/mtp/MtpDataPacket.h
@@ -30,7 +30,7 @@ class MtpStringBuffer;
class MtpDataPacket : public MtpPacket {
private:
// current offset for get/put methods
- int mOffset;
+ size_t mOffset;
public:
MtpDataPacket();
@@ -42,17 +42,18 @@ public:
void setTransactionID(MtpTransactionID id);
inline const uint8_t* getData() const { return mBuffer + MTP_CONTAINER_HEADER_SIZE; }
- inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
- inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; }
- uint16_t getUInt16();
- inline int16_t getInt16() { return (int16_t)getUInt16(); }
- uint32_t getUInt32();
- inline int32_t getInt32() { return (int32_t)getUInt32(); }
- uint64_t getUInt64();
- inline int64_t getInt64() { return (int64_t)getUInt64(); }
- void getUInt128(uint128_t& value);
- inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
- void getString(MtpStringBuffer& string);
+
+ bool getUInt8(uint8_t& value);
+ inline bool getInt8(int8_t& value) { return getUInt8((uint8_t&)value); }
+ bool getUInt16(uint16_t& value);
+ inline bool getInt16(int16_t& value) { return getUInt16((uint16_t&)value); }
+ bool getUInt32(uint32_t& value);
+ inline bool getInt32(int32_t& value) { return getUInt32((uint32_t&)value); }
+ bool getUInt64(uint64_t& value);
+ inline bool getInt64(int64_t& value) { return getUInt64((uint64_t&)value); }
+ bool getUInt128(uint128_t& value);
+ inline bool getInt128(int128_t& value) { return getUInt128((uint128_t&)value); }
+ bool getString(MtpStringBuffer& string);
Int8List* getAInt8();
UInt8List* getAUInt8();
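Because every getter now reports success, parsing code must check each read instead of trusting the packet length. A hypothetical caller (not part of the patch, but mirroring the pattern applied in MtpDeviceInfo::read() and MtpObjectInfo::read()):

    #include <stdint.h>
    #include "MtpDataPacket.h"   // the patched header above

    // Returns false as soon as the packet turns out to be truncated.
    static bool readVersionAndExtension(android::MtpDataPacket& packet,
                                        uint16_t& version, uint32_t& extensionId) {
        if (!packet.getUInt16(version)) return false;
        if (!packet.getUInt32(extensionId)) return false;
        return true;
    }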
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
index 96331b5..3eafd6f 100644
--- a/media/mtp/MtpDevice.cpp
+++ b/media/mtp/MtpDevice.cpp
@@ -322,8 +322,10 @@ MtpDeviceInfo* MtpDevice::getDeviceInfo() {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpDeviceInfo* info = new MtpDeviceInfo;
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -355,8 +357,10 @@ MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpStorageInfo* info = new MtpStorageInfo(storageID);
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -394,8 +398,10 @@ MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpObjectInfo* info = new MtpObjectInfo(handle);
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -556,8 +562,10 @@ MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpProperty* property = new MtpProperty;
- property->read(mData);
- return property;
+ if (property->read(mData))
+ return property;
+ else
+ delete property;
}
return NULL;
}
@@ -575,15 +583,17 @@ MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectForma
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpProperty* property = new MtpProperty;
- property->read(mData);
- return property;
+ if (property->read(mData))
+ return property;
+ else
+ delete property;
}
return NULL;
}
bool MtpDevice::readObject(MtpObjectHandle handle,
bool (* callback)(void* data, int offset, int length, void* clientData),
- int objectSize, void* clientData) {
+ size_t objectSize, void* clientData) {
Mutex::Autolock autoLock(mMutex);
bool result = false;
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
index b69203e..9b0acbf 100644
--- a/media/mtp/MtpDevice.h
+++ b/media/mtp/MtpDevice.h
@@ -98,7 +98,7 @@ public:
bool readObject(MtpObjectHandle handle,
bool (* callback)(void* data, int offset,
int length, void* clientData),
- int objectSize, void* clientData);
+ size_t objectSize, void* clientData);
bool readObject(MtpObjectHandle handle, const char* destPath, int group,
int perm);
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
index 108e2b8..3e1dff7 100644
--- a/media/mtp/MtpDeviceInfo.cpp
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -28,7 +28,7 @@ MtpDeviceInfo::MtpDeviceInfo()
mVendorExtensionID(0),
mVendorExtensionVersion(0),
mVendorExtensionDesc(NULL),
- mFunctionalCode(0),
+ mFunctionalMode(0),
mOperations(NULL),
mEvents(NULL),
mDeviceProperties(NULL),
@@ -59,39 +59,46 @@ MtpDeviceInfo::~MtpDeviceInfo() {
free(mSerial);
}
-void MtpDeviceInfo::read(MtpDataPacket& packet) {
+bool MtpDeviceInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
// read the device info
- mStandardVersion = packet.getUInt16();
- mVendorExtensionID = packet.getUInt32();
- mVendorExtensionVersion = packet.getUInt16();
+ if (!packet.getUInt16(mStandardVersion)) return false;
+ if (!packet.getUInt32(mVendorExtensionID)) return false;
+ if (!packet.getUInt16(mVendorExtensionVersion)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVendorExtensionDesc = strdup((const char *)string);
- mFunctionalCode = packet.getUInt16();
+ if (!packet.getUInt16(mFunctionalMode)) return false;
mOperations = packet.getAUInt16();
+ if (!mOperations) return false;
mEvents = packet.getAUInt16();
+ if (!mEvents) return false;
mDeviceProperties = packet.getAUInt16();
+ if (!mDeviceProperties) return false;
mCaptureFormats = packet.getAUInt16();
+ if (!mCaptureFormats) return false;
mPlaybackFormats = packet.getAUInt16();
+    if (!mPlaybackFormats) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mManufacturer = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mModel = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVersion = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mSerial = strdup((const char *)string);
+
+ return true;
}
void MtpDeviceInfo::print() {
ALOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
- ALOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
- mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+ ALOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalMode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+ mVendorExtensionDesc, mFunctionalMode, mManufacturer, mModel, mVersion, mSerial);
}
} // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
index 2abaa10..bcda9a5 100644
--- a/media/mtp/MtpDeviceInfo.h
+++ b/media/mtp/MtpDeviceInfo.h
@@ -29,7 +29,7 @@ public:
uint32_t mVendorExtensionID;
uint16_t mVendorExtensionVersion;
char* mVendorExtensionDesc;
- uint16_t mFunctionalCode;
+ uint16_t mFunctionalMode;
UInt16List* mOperations;
UInt16List* mEvents;
MtpDevicePropertyList* mDeviceProperties;
@@ -44,7 +44,7 @@ public:
MtpDeviceInfo();
virtual ~MtpDeviceInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
index cd15343..0573104 100644
--- a/media/mtp/MtpObjectInfo.cpp
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -55,39 +55,41 @@ MtpObjectInfo::~MtpObjectInfo() {
free(mKeywords);
}
-void MtpObjectInfo::read(MtpDataPacket& packet) {
+bool MtpObjectInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
time_t time;
- mStorageID = packet.getUInt32();
- mFormat = packet.getUInt16();
- mProtectionStatus = packet.getUInt16();
- mCompressedSize = packet.getUInt32();
- mThumbFormat = packet.getUInt16();
- mThumbCompressedSize = packet.getUInt32();
- mThumbPixWidth = packet.getUInt32();
- mThumbPixHeight = packet.getUInt32();
- mImagePixWidth = packet.getUInt32();
- mImagePixHeight = packet.getUInt32();
- mImagePixDepth = packet.getUInt32();
- mParent = packet.getUInt32();
- mAssociationType = packet.getUInt16();
- mAssociationDesc = packet.getUInt32();
- mSequenceNumber = packet.getUInt32();
+ if (!packet.getUInt32(mStorageID)) return false;
+ if (!packet.getUInt16(mFormat)) return false;
+ if (!packet.getUInt16(mProtectionStatus)) return false;
+ if (!packet.getUInt32(mCompressedSize)) return false;
+ if (!packet.getUInt16(mThumbFormat)) return false;
+ if (!packet.getUInt32(mThumbCompressedSize)) return false;
+ if (!packet.getUInt32(mThumbPixWidth)) return false;
+ if (!packet.getUInt32(mThumbPixHeight)) return false;
+ if (!packet.getUInt32(mImagePixWidth)) return false;
+ if (!packet.getUInt32(mImagePixHeight)) return false;
+ if (!packet.getUInt32(mImagePixDepth)) return false;
+ if (!packet.getUInt32(mParent)) return false;
+ if (!packet.getUInt16(mAssociationType)) return false;
+ if (!packet.getUInt32(mAssociationDesc)) return false;
+ if (!packet.getUInt32(mSequenceNumber)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mName = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
mDateCreated = time;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
mDateModified = time;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mKeywords = strdup((const char *)string);
+
+ return true;
}
void MtpObjectInfo::print() {
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
index c7a449c..86780f1 100644
--- a/media/mtp/MtpObjectInfo.h
+++ b/media/mtp/MtpObjectInfo.h
@@ -50,7 +50,7 @@ public:
MtpObjectInfo(MtpObjectHandle handle);
virtual ~MtpObjectInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index dd07843..bab1335 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -52,7 +52,7 @@ void MtpPacket::reset() {
memset(mBuffer, 0, mBufferSize);
}
-void MtpPacket::allocate(int length) {
+void MtpPacket::allocate(size_t length) {
if (length > mBufferSize) {
int newLength = length + mAllocationIncrement;
mBuffer = (uint8_t *)realloc(mBuffer, newLength);
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
index 0ffb1d3..037722a 100644
--- a/media/mtp/MtpPacket.h
+++ b/media/mtp/MtpPacket.h
@@ -28,11 +28,11 @@ class MtpPacket {
protected:
uint8_t* mBuffer;
// current size of the buffer
- int mBufferSize;
+ size_t mBufferSize;
// number of bytes to add when resizing the buffer
- int mAllocationIncrement;
+ size_t mAllocationIncrement;
// size of the data in the packet
- int mPacketSize;
+ size_t mPacketSize;
public:
MtpPacket(int bufferSize);
@@ -41,7 +41,7 @@ public:
// sets packet size to the default container size and sets buffer to zero
virtual void reset();
- void allocate(int length);
+ void allocate(size_t length);
void dump();
void copyFrom(const MtpPacket& src);
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
index c500901..d58e2a4 100644
--- a/media/mtp/MtpProperty.cpp
+++ b/media/mtp/MtpProperty.cpp
@@ -106,15 +106,15 @@ MtpProperty::~MtpProperty() {
free(mMinimumValue.str);
free(mMaximumValue.str);
if (mDefaultArrayValues) {
- for (int i = 0; i < mDefaultArrayLength; i++)
+ for (uint32_t i = 0; i < mDefaultArrayLength; i++)
free(mDefaultArrayValues[i].str);
}
if (mCurrentArrayValues) {
- for (int i = 0; i < mCurrentArrayLength; i++)
+ for (uint32_t i = 0; i < mCurrentArrayLength; i++)
free(mCurrentArrayValues[i].str);
}
if (mEnumValues) {
- for (int i = 0; i < mEnumLength; i++)
+ for (uint16_t i = 0; i < mEnumLength; i++)
free(mEnumValues[i].str);
}
}
@@ -123,11 +123,14 @@ MtpProperty::~MtpProperty() {
delete[] mEnumValues;
}
-void MtpProperty::read(MtpDataPacket& packet) {
- mCode = packet.getUInt16();
+bool MtpProperty::read(MtpDataPacket& packet) {
+ uint8_t temp8;
+
+ if (!packet.getUInt16(mCode)) return false;
bool deviceProp = isDeviceProperty();
- mType = packet.getUInt16();
- mWriteable = (packet.getUInt8() == 1);
+ if (!packet.getUInt16(mType)) return false;
+ if (!packet.getUInt8(temp8)) return false;
+ mWriteable = (temp8 == 1);
switch (mType) {
case MTP_TYPE_AINT8:
case MTP_TYPE_AUINT8:
@@ -140,28 +143,36 @@ void MtpProperty::read(MtpDataPacket& packet) {
case MTP_TYPE_AINT128:
case MTP_TYPE_AUINT128:
mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
- if (deviceProp)
+ if (!mDefaultArrayValues) return false;
+ if (deviceProp) {
mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+ if (!mCurrentArrayValues) return false;
+ }
break;
default:
- readValue(packet, mDefaultValue);
- if (deviceProp)
- readValue(packet, mCurrentValue);
+ if (!readValue(packet, mDefaultValue)) return false;
+ if (deviceProp) {
+ if (!readValue(packet, mCurrentValue)) return false;
+ }
}
- if (!deviceProp)
- mGroupCode = packet.getUInt32();
- mFormFlag = packet.getUInt8();
+ if (!deviceProp) {
+ if (!packet.getUInt32(mGroupCode)) return false;
+ }
+ if (!packet.getUInt8(mFormFlag)) return false;
if (mFormFlag == kFormRange) {
- readValue(packet, mMinimumValue);
- readValue(packet, mMaximumValue);
- readValue(packet, mStepSize);
+ if (!readValue(packet, mMinimumValue)) return false;
+ if (!readValue(packet, mMaximumValue)) return false;
+ if (!readValue(packet, mStepSize)) return false;
} else if (mFormFlag == kFormEnum) {
- mEnumLength = packet.getUInt16();
+ if (!packet.getUInt16(mEnumLength)) return false;
mEnumValues = new MtpPropertyValue[mEnumLength];
- for (int i = 0; i < mEnumLength; i++)
- readValue(packet, mEnumValues[i]);
+ for (int i = 0; i < mEnumLength; i++) {
+ if (!readValue(packet, mEnumValues[i])) return false;
+ }
}
+
+ return true;
}
void MtpProperty::write(MtpDataPacket& packet) {
@@ -409,57 +420,59 @@ void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) {
}
}
-void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+bool MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
MtpStringBuffer stringBuffer;
switch (mType) {
case MTP_TYPE_INT8:
case MTP_TYPE_AINT8:
- value.u.i8 = packet.getInt8();
+ if (!packet.getInt8(value.u.i8)) return false;
break;
case MTP_TYPE_UINT8:
case MTP_TYPE_AUINT8:
- value.u.u8 = packet.getUInt8();
+ if (!packet.getUInt8(value.u.u8)) return false;
break;
case MTP_TYPE_INT16:
case MTP_TYPE_AINT16:
- value.u.i16 = packet.getInt16();
+ if (!packet.getInt16(value.u.i16)) return false;
break;
case MTP_TYPE_UINT16:
case MTP_TYPE_AUINT16:
- value.u.u16 = packet.getUInt16();
+ if (!packet.getUInt16(value.u.u16)) return false;
break;
case MTP_TYPE_INT32:
case MTP_TYPE_AINT32:
- value.u.i32 = packet.getInt32();
+ if (!packet.getInt32(value.u.i32)) return false;
break;
case MTP_TYPE_UINT32:
case MTP_TYPE_AUINT32:
- value.u.u32 = packet.getUInt32();
+ if (!packet.getUInt32(value.u.u32)) return false;
break;
case MTP_TYPE_INT64:
case MTP_TYPE_AINT64:
- value.u.i64 = packet.getInt64();
+ if (!packet.getInt64(value.u.i64)) return false;
break;
case MTP_TYPE_UINT64:
case MTP_TYPE_AUINT64:
- value.u.u64 = packet.getUInt64();
+ if (!packet.getUInt64(value.u.u64)) return false;
break;
case MTP_TYPE_INT128:
case MTP_TYPE_AINT128:
- packet.getInt128(value.u.i128);
+ if (!packet.getInt128(value.u.i128)) return false;
break;
case MTP_TYPE_UINT128:
case MTP_TYPE_AUINT128:
- packet.getUInt128(value.u.u128);
+ if (!packet.getUInt128(value.u.u128)) return false;
break;
case MTP_TYPE_STR:
- packet.getString(stringBuffer);
+ if (!packet.getString(stringBuffer)) return false;
value.str = strdup(stringBuffer);
break;
default:
ALOGE("unknown type %04X in MtpProperty::readValue", mType);
+ return false;
}
+ return true;
}
void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
@@ -517,8 +530,9 @@ void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
}
}
-MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
- length = packet.getUInt32();
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, uint32_t& length) {
+ if (!packet.getUInt32(length)) return NULL;
+
// Fail if resulting array is over 2GB. This is because the maximum array
// size may be less than SIZE_MAX on some platforms.
if ( CC_UNLIKELY(
@@ -528,14 +542,17 @@ MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& lengt
return NULL;
}
MtpPropertyValue* result = new MtpPropertyValue[length];
- for (int i = 0; i < length; i++)
- readValue(packet, result[i]);
+ for (uint32_t i = 0; i < length; i++)
+ if (!readValue(packet, result[i])) {
+            delete[] result;
+ return NULL;
+ }
return result;
}
-void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, uint32_t length) {
packet.putUInt32(length);
- for (int i = 0; i < length; i++)
+ for (uint32_t i = 0; i < length; i++)
writeValue(packet, values[i]);
}
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
index 06ca56e..2e2ead1 100644
--- a/media/mtp/MtpProperty.h
+++ b/media/mtp/MtpProperty.h
@@ -49,9 +49,9 @@ public:
MtpPropertyValue mCurrentValue;
// for array types
- int mDefaultArrayLength;
+ uint32_t mDefaultArrayLength;
MtpPropertyValue* mDefaultArrayValues;
- int mCurrentArrayLength;
+ uint32_t mCurrentArrayLength;
MtpPropertyValue* mCurrentArrayValues;
enum {
@@ -70,7 +70,7 @@ public:
MtpPropertyValue mStepSize;
// for enum form
- int mEnumLength;
+ uint16_t mEnumLength;
MtpPropertyValue* mEnumValues;
public:
@@ -83,7 +83,7 @@ public:
inline MtpPropertyCode getPropertyCode() const { return mCode; }
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void write(MtpDataPacket& packet);
void setDefaultValue(const uint16_t* string);
@@ -102,11 +102,11 @@ public:
}
private:
- void readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ bool readValue(MtpDataPacket& packet, MtpPropertyValue& value);
void writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
- MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length);
+ MtpPropertyValue* readArrayValues(MtpDataPacket& packet, uint32_t& length);
void writeArrayValues(MtpDataPacket& packet,
- MtpPropertyValue* values, int length);
+ MtpPropertyValue* values, uint32_t length);
};
}; // namespace android
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
index 0e58e01..40b11b0 100644
--- a/media/mtp/MtpRequestPacket.cpp
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -27,7 +27,8 @@
namespace android {
MtpRequestPacket::MtpRequestPacket()
- : MtpPacket(512)
+ : MtpPacket(512),
+ mParameterCount(0)
{
}
@@ -37,10 +38,21 @@ MtpRequestPacket::~MtpRequestPacket() {
#ifdef MTP_DEVICE
int MtpRequestPacket::read(int fd) {
int ret = ::read(fd, mBuffer, mBufferSize);
- if (ret >= 0)
+ if (ret < 0) {
+ // file read error
+ return ret;
+ }
+
+ // request packet should have 12 byte header followed by 0 to 5 32-bit arguments
+ if (ret >= MTP_CONTAINER_HEADER_SIZE
+ && ret <= MTP_CONTAINER_HEADER_SIZE + 5 * sizeof(uint32_t)
+ && ((ret - MTP_CONTAINER_HEADER_SIZE) & 3) == 0) {
mPacketSize = ret;
- else
- mPacketSize = 0;
+ mParameterCount = (ret - MTP_CONTAINER_HEADER_SIZE) / sizeof(uint32_t);
+ } else {
+ ALOGE("Malformed MTP request packet");
+ ret = -1;
+ }
return ret;
}
#endif
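Worked through with the 12-byte container header the comment refers to: a 20-byte read passes all three checks and yields (20 - 12) / 4 = 2 parameters; a 22-byte read fails the 4-byte-alignment test, and a 48-byte read exceeds the 12 + 5 * 4 = 32-byte maximum, so both are rejected as malformed.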
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
index 1201f11..79b798d 100644
--- a/media/mtp/MtpRequestPacket.h
+++ b/media/mtp/MtpRequestPacket.h
@@ -43,6 +43,10 @@ public:
inline MtpOperationCode getOperationCode() const { return getContainerCode(); }
inline void setOperationCode(MtpOperationCode code)
{ return setContainerCode(code); }
+ inline int getParameterCount() const { return mParameterCount; }
+
+private:
+ int mParameterCount;
};
}; // namespace android
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index aa43967..931a09d 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -495,6 +495,9 @@ MtpResponseCode MtpServer::doOpenSession() {
mResponse.setParameter(1, mSessionID);
return MTP_RESPONSE_SESSION_ALREADY_OPEN;
}
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
mSessionID = mRequest.getParameter(1);
mSessionOpen = true;
@@ -529,6 +532,9 @@ MtpResponseCode MtpServer::doGetStorageInfo() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
MtpStorageID id = mRequest.getParameter(1);
MtpStorage* storage = getStorage(id);
if (!storage)
@@ -550,6 +556,8 @@ MtpResponseCode MtpServer::doGetStorageInfo() {
MtpResponseCode MtpServer::doGetObjectPropsSupported() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectFormat format = mRequest.getParameter(1);
MtpObjectPropertyList* properties = mDatabase->getSupportedObjectProperties(format);
mData.putAUInt16(properties);
@@ -560,6 +568,8 @@ MtpResponseCode MtpServer::doGetObjectPropsSupported() {
MtpResponseCode MtpServer::doGetObjectHandles() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
@@ -577,6 +587,8 @@ MtpResponseCode MtpServer::doGetObjectHandles() {
MtpResponseCode MtpServer::doGetNumObjects() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
@@ -599,6 +611,8 @@ MtpResponseCode MtpServer::doGetObjectReferences() {
return MTP_RESPONSE_SESSION_NOT_OPEN;
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
// FIXME - check for invalid object handle
@@ -617,9 +631,13 @@ MtpResponseCode MtpServer::doSetObjectReferences() {
return MTP_RESPONSE_SESSION_NOT_OPEN;
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID handle = mRequest.getParameter(1);
MtpObjectHandleList* references = mData.getAUInt32();
+ if (!references)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
delete references;
return result;
@@ -628,6 +646,8 @@ MtpResponseCode MtpServer::doSetObjectReferences() {
MtpResponseCode MtpServer::doGetObjectPropValue() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectProperty property = mRequest.getParameter(2);
ALOGV("GetObjectPropValue %d %s\n", handle,
@@ -639,6 +659,8 @@ MtpResponseCode MtpServer::doGetObjectPropValue() {
MtpResponseCode MtpServer::doSetObjectPropValue() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectProperty property = mRequest.getParameter(2);
ALOGV("SetObjectPropValue %d %s\n", handle,
@@ -648,6 +670,8 @@ MtpResponseCode MtpServer::doSetObjectPropValue() {
}
MtpResponseCode MtpServer::doGetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("GetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -656,6 +680,8 @@ MtpResponseCode MtpServer::doGetDevicePropValue() {
}
MtpResponseCode MtpServer::doSetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("SetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -664,6 +690,8 @@ MtpResponseCode MtpServer::doSetDevicePropValue() {
}
MtpResponseCode MtpServer::doResetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("ResetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -674,6 +702,8 @@ MtpResponseCode MtpServer::doResetDevicePropValue() {
MtpResponseCode MtpServer::doGetObjectPropList() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 5)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
// use uint32_t so we can support 0xFFFFFFFF
@@ -691,6 +721,8 @@ MtpResponseCode MtpServer::doGetObjectPropList() {
MtpResponseCode MtpServer::doGetObjectInfo() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectInfo info(handle);
MtpResponseCode result = mDatabase->getObjectInfo(handle, info);
@@ -732,6 +764,8 @@ MtpResponseCode MtpServer::doGetObjectInfo() {
MtpResponseCode MtpServer::doGetObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpString pathBuf;
int64_t fileLength;
@@ -765,6 +799,8 @@ MtpResponseCode MtpServer::doGetObject() {
}
MtpResponseCode MtpServer::doGetThumb() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
size_t thumbSize;
void* thumb = mDatabase->getThumbnail(handle, thumbSize);
@@ -783,6 +819,8 @@ MtpResponseCode MtpServer::doGetThumb() {
MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
uint64_t offset;
uint32_t length;
@@ -832,6 +870,11 @@ MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
MtpResponseCode MtpServer::doSendObjectInfo() {
MtpString path;
+ uint16_t temp16;
+ uint32_t temp32;
+
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1);
MtpStorage* storage = getStorage(storageID);
MtpObjectHandle parent = mRequest.getParameter(2);
@@ -853,25 +896,29 @@ MtpResponseCode MtpServer::doSendObjectInfo() {
}
// read only the fields we need
- mData.getUInt32(); // storage ID
- MtpObjectFormat format = mData.getUInt16();
- mData.getUInt16(); // protection status
- mSendObjectFileSize = mData.getUInt32();
- mData.getUInt16(); // thumb format
- mData.getUInt32(); // thumb compressed size
- mData.getUInt32(); // thumb pix width
- mData.getUInt32(); // thumb pix height
- mData.getUInt32(); // image pix width
- mData.getUInt32(); // image pix height
- mData.getUInt32(); // image bit depth
- mData.getUInt32(); // parent
- uint16_t associationType = mData.getUInt16();
- uint32_t associationDesc = mData.getUInt32(); // association desc
- mData.getUInt32(); // sequence number
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // storage ID
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER;
+ MtpObjectFormat format = temp16;
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER; // protection status
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER;
+ mSendObjectFileSize = temp32;
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb format
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb compressed size
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb pix width
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb pix height
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image pix width
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image pix height
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image bit depth
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // parent
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER;
+ uint16_t associationType = temp16;
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER;
+ uint32_t associationDesc = temp32; // association desc
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // sequence number
MtpStringBuffer name, created, modified;
- mData.getString(name); // file name
- mData.getString(created); // date created
- mData.getString(modified); // date modified
+ if (!mData.getString(name)) return MTP_RESPONSE_INVALID_PARAMETER; // file name
+ if (!mData.getString(created)) return MTP_RESPONSE_INVALID_PARAMETER; // date created
+ if (!mData.getString(modified)) return MTP_RESPONSE_INVALID_PARAMETER; // date modified
// keywords follow
ALOGV("name: %s format: %04X\n", (const char *)name, format);
@@ -1066,6 +1113,8 @@ static void deletePath(const char* path) {
MtpResponseCode MtpServer::doDeleteObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectFormat format = mRequest.getParameter(2);
// FIXME - support deleting all objects if handle is 0xFFFFFFFF
@@ -1087,6 +1136,8 @@ MtpResponseCode MtpServer::doDeleteObject() {
}
MtpResponseCode MtpServer::doGetObjectPropDesc() {
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectProperty propCode = mRequest.getParameter(1);
MtpObjectFormat format = mRequest.getParameter(2);
ALOGV("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
@@ -1100,6 +1151,8 @@ MtpResponseCode MtpServer::doGetObjectPropDesc() {
}
MtpResponseCode MtpServer::doGetDevicePropDesc() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty propCode = mRequest.getParameter(1);
ALOGV("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
@@ -1113,6 +1166,8 @@ MtpResponseCode MtpServer::doGetDevicePropDesc() {
MtpResponseCode MtpServer::doSendPartialObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
uint64_t offset = mRequest.getParameter(2);
uint64_t offset2 = mRequest.getParameter(3);
@@ -1180,6 +1235,8 @@ MtpResponseCode MtpServer::doSendPartialObject() {
}
MtpResponseCode MtpServer::doTruncateObject() {
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
ObjectEdit* edit = getEditObject(handle);
if (!edit) {
@@ -1199,6 +1256,8 @@ MtpResponseCode MtpServer::doTruncateObject() {
}
MtpResponseCode MtpServer::doBeginEditObject() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
if (getEditObject(handle)) {
ALOGE("object already open for edit in doBeginEditObject");
@@ -1223,6 +1282,8 @@ MtpResponseCode MtpServer::doBeginEditObject() {
}
MtpResponseCode MtpServer::doEndEditObject() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
ObjectEdit* edit = getEditObject(handle);
if (!edit) {
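
Every MtpServer handler above now checks the parsed parameter count before calling getParameter(), so a short request can no longer make the server read parameters that were never sent. A sketch of that guard pattern, using toy Request/ResponseCode types in place of the real MTP classes:

    #include <cstdint>
    #include <vector>

    // Hypothetical stand-ins for MtpRequestPacket and the MTP response codes.
    enum ResponseCode : uint16_t { RESP_OK = 0x2001, RESP_INVALID_PARAMETER = 0x201D };

    struct Request {
        std::vector<uint32_t> params;                          // parsed 32-bit parameters
        int count() const { return static_cast<int>(params.size()); }
        uint32_t param(int i) const { return params[i - 1]; }  // MTP parameters are 1-based
    };

    // Pattern used by the handlers above: verify the count before indexing.
    ResponseCode doGetObjectHandles(const Request& req) {
        if (req.count() < 3)
            return RESP_INVALID_PARAMETER;    // storageID, format and parent are all required
        uint32_t storageID = req.param(1);
        uint32_t format    = req.param(2);
        uint32_t parent    = req.param(3);
        (void)storageID; (void)format; (void)parent;  // real code would query the database here
        return RESP_OK;
    }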
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
index 2b1a9ae..5d4ebbf 100644
--- a/media/mtp/MtpStorageInfo.cpp
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -45,21 +45,23 @@ MtpStorageInfo::~MtpStorageInfo() {
free(mVolumeIdentifier);
}
-void MtpStorageInfo::read(MtpDataPacket& packet) {
+bool MtpStorageInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
// read the device info
- mStorageType = packet.getUInt16();
- mFileSystemType = packet.getUInt16();
- mAccessCapability = packet.getUInt16();
- mMaxCapacity = packet.getUInt64();
- mFreeSpaceBytes = packet.getUInt64();
- mFreeSpaceObjects = packet.getUInt32();
+ if (!packet.getUInt16(mStorageType)) return false;
+ if (!packet.getUInt16(mFileSystemType)) return false;
+ if (!packet.getUInt16(mAccessCapability)) return false;
+ if (!packet.getUInt64(mMaxCapacity)) return false;
+ if (!packet.getUInt64(mFreeSpaceBytes)) return false;
+ if (!packet.getUInt32(mFreeSpaceObjects)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mStorageDescription = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVolumeIdentifier = strdup((const char *)string);
+
+ return true;
}
void MtpStorageInfo::print() {
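
The packet getters used above now return a bool instead of a value, so each field read from a truncated or hostile packet can fail cleanly. A self-contained sketch of the same early-return chaining, with a toy ByteReader in place of MtpDataPacket:

    #include <cstdint>
    #include <cstring>
    #include <utility>
    #include <vector>

    // Toy reader with the same "bool plus out-parameter" shape the patch gives MtpDataPacket.
    class ByteReader {
    public:
        explicit ByteReader(std::vector<uint8_t> buf) : mBuf(std::move(buf)) {}
        bool getUInt16(uint16_t& out) { return get(&out, sizeof(out)); }
        bool getUInt32(uint32_t& out) { return get(&out, sizeof(out)); }
        bool getUInt64(uint64_t& out) { return get(&out, sizeof(out)); }
    private:
        bool get(void* dst, size_t n) {
            if (mBuf.size() - mPos < n) return false;   // refuse to read past the buffer
            std::memcpy(dst, mBuf.data() + mPos, n);
            mPos += n;
            return true;
        }
        std::vector<uint8_t> mBuf;
        size_t mPos = 0;
    };

    struct StorageInfo {
        uint16_t storageType = 0, fileSystemType = 0, accessCapability = 0;
        uint64_t maxCapacity = 0, freeSpaceBytes = 0;
        uint32_t freeSpaceObjects = 0;

        // Mirrors the rewritten MtpStorageInfo::read(): fail fast on a short packet.
        bool read(ByteReader& p) {
            if (!p.getUInt16(storageType)) return false;
            if (!p.getUInt16(fileSystemType)) return false;
            if (!p.getUInt16(accessCapability)) return false;
            if (!p.getUInt64(maxCapacity)) return false;
            if (!p.getUInt64(freeSpaceBytes)) return false;
            if (!p.getUInt32(freeSpaceObjects)) return false;
            return true;
        }
    };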
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
index 2cb626e..35a8189 100644
--- a/media/mtp/MtpStorageInfo.h
+++ b/media/mtp/MtpStorageInfo.h
@@ -39,7 +39,7 @@ public:
MtpStorageInfo(MtpStorageID id);
virtual ~MtpStorageInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
index f3420a4..df04694 100644
--- a/media/mtp/MtpStringBuffer.cpp
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -123,11 +123,17 @@ void MtpStringBuffer::set(const uint16_t* src) {
mByteCount = dest - mBuffer;
}
-void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
- int count = packet->getUInt8();
+bool MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+ uint8_t count;
+ if (!packet->getUInt8(count))
+ return false;
+
uint8_t* dest = mBuffer;
for (int i = 0; i < count; i++) {
- uint16_t ch = packet->getUInt16();
+ uint16_t ch;
+
+ if (!packet->getUInt16(ch))
+ return false;
if (ch >= 0x0800) {
*dest++ = (uint8_t)(0xE0 | (ch >> 12));
*dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
@@ -142,6 +148,7 @@ void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
*dest++ = 0;
mCharCount = count;
mByteCount = dest - mBuffer;
+ return true;
}
void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
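
For reference, the loop above expands each 16-bit code unit into UTF-8. The following sketch shows the standard BMP encoding the branches correspond to; it is an illustration of the layout only (no surrogate handling), not a copy of the AOSP function:

    #include <cstdint>
    #include <vector>

    static void appendUtf8(std::vector<uint8_t>& out, uint16_t ch) {
        if (ch >= 0x0800) {                                       // 3-byte sequence
            out.push_back(static_cast<uint8_t>(0xE0 | (ch >> 12)));
            out.push_back(static_cast<uint8_t>(0x80 | ((ch >> 6) & 0x3F)));
            out.push_back(static_cast<uint8_t>(0x80 | (ch & 0x3F)));
        } else if (ch >= 0x80) {                                  // 2-byte sequence
            out.push_back(static_cast<uint8_t>(0xC0 | (ch >> 6)));
            out.push_back(static_cast<uint8_t>(0x80 | (ch & 0x3F)));
        } else {                                                  // plain ASCII
            out.push_back(static_cast<uint8_t>(ch));
        }
    }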
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
index e5150df..85d91e8 100644
--- a/media/mtp/MtpStringBuffer.h
+++ b/media/mtp/MtpStringBuffer.h
@@ -46,7 +46,7 @@ public:
void set(const char* src);
void set(const uint16_t* src);
- void readFromPacket(MtpDataPacket* packet);
+ bool readFromPacket(MtpDataPacket* packet);
void writeToPacket(MtpDataPacket* packet) const;
inline int getCharCount() const { return mCharCount; }
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 970a43c..db57d0b 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -256,18 +256,23 @@ PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor *ex) {
len -= datalen;
}
- // there are <numentries> in the buffer, we need
- // (source buffer size) + 4 + (4 * numentries) bytes for the PsshInfo structure
- size_t newsize = buffer->size() + 4 + (4 * numentries);
+ // there are <numentries> in the source buffer, we need
+ // (source buffer size) - (sizeof(uint32_t) * numentries) + sizeof(size_t)
+ // + ((sizeof(void*) + sizeof(size_t)) * numentries) bytes for the PsshInfo structure
+ // Or in other words, the data lengths in the source structure are replaced by size_t
+ // (which may be the same size or larger, for 64 bit), and in addition there is an
+ // extra pointer for each entry, and an extra size_t for the entire PsshInfo.
+ size_t newsize = buffer->size() - (sizeof(uint32_t) * numentries) + sizeof(size_t)
+ + ((sizeof(void*) + sizeof(size_t)) * numentries);
ex->mPsshBuf = new ABuffer(newsize);
ex->mPsshBuf->setRange(0, newsize);
// copy data
const uint8_t* src = buffer->data();
uint8_t* dst = ex->mPsshBuf->data();
- uint8_t* dstdata = dst + 4 + numentries * sizeof(PsshEntry);
- *((uint32_t*)dst) = numentries;
- dst += 4;
+ uint8_t* dstdata = dst + sizeof(size_t) + numentries * sizeof(PsshEntry);
+ *((size_t*)dst) = numentries;
+ dst += sizeof(size_t);
for (size_t i = 0; i < numentries; i++) {
// copy uuid
memcpy(dst, src, 16);
@@ -276,14 +281,14 @@ PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor *ex) {
// get/copy data length
uint32_t datalen = *((uint32_t*)src);
- memcpy(dst, src, 4);
- src += 4;
- dst += 4;
+ *((size_t*)dst) = datalen;
+ src += sizeof(uint32_t);
+ dst += sizeof(size_t);
// the next entry in the destination is a pointer to the actual data, which we store
// after the array of PsshEntry
- memcpy(dst, &dstdata, sizeof(dstdata));
- dst += 4;
+ *((void**)dst) = dstdata;
+ dst += sizeof(void*);
// copy the actual data
memcpy(dstdata, src, datalen);
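
The rewritten size computation above accounts for the 64-bit PsshInfo layout: each source entry's uint32_t length becomes a size_t, each entry gains a data pointer, and the whole structure gains a size_t entry count. A small sketch of that arithmetic, assuming (per the comment above) the source lays out each entry as a 16-byte UUID, a uint32_t length, then the payload; PsshEntryLike is a stand-in for the NDK PsshEntry:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    struct PsshEntryLike {
        uint8_t uuid[16];
        size_t  datalen;
        void*   data;
    };

    static size_t destBufferSize(size_t srcSize, size_t numEntries) {
        // Destination layout: size_t entry count, then numEntries records, then raw payloads.
        // Lengths grow from uint32_t to size_t and each entry gains a pointer, so the buffer
        // must grow accordingly on 64-bit builds instead of using hard-coded 4-byte fields.
        return srcSize - sizeof(uint32_t) * numEntries + sizeof(size_t)
               + (sizeof(void*) + sizeof(size_t)) * numEntries;
    }

    int main() {
        // Example: 2 entries, each with a 32-byte payload.
        size_t src = 2 * (16 + sizeof(uint32_t) + 32);
        printf("src=%zu dst=%zu\n", src, destBufferSize(src, 2));
    }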
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index c571cf5..fed85e9 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -650,6 +650,7 @@ sp<IAudioTrack> AudioFlinger::createTrack(
}
}
+ setAudioHwSyncForSession_l(thread, (audio_session_t)lSessionId);
}
if (lStatus != NO_ERROR) {
@@ -1604,22 +1605,69 @@ status_t AudioFlinger::setLowRamDevice(bool isLowRamDevice)
audio_hw_sync_t AudioFlinger::getAudioHwSyncForSession(audio_session_t sessionId)
{
Mutex::Autolock _l(mLock);
+
+ ssize_t index = mHwAvSyncIds.indexOfKey(sessionId);
+ if (index >= 0) {
+ ALOGV("getAudioHwSyncForSession found ID %d for session %d",
+ mHwAvSyncIds.valueAt(index), sessionId);
+ return mHwAvSyncIds.valueAt(index);
+ }
+
+ audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
+ if (dev == NULL) {
+ return AUDIO_HW_SYNC_INVALID;
+ }
+ char *reply = dev->get_parameters(dev, AUDIO_PARAMETER_HW_AV_SYNC);
+ AudioParameter param = AudioParameter(String8(reply));
+ free(reply);
+
+ int value;
+ if (param.getInt(String8(AUDIO_PARAMETER_HW_AV_SYNC), value) != NO_ERROR) {
+ ALOGW("getAudioHwSyncForSession error getting sync for session %d", sessionId);
+ return AUDIO_HW_SYNC_INVALID;
+ }
+
+ // allow only one session for a given HW A/V sync ID.
+ for (size_t i = 0; i < mHwAvSyncIds.size(); i++) {
+ if (mHwAvSyncIds.valueAt(i) == (audio_hw_sync_t)value) {
+ ALOGV("getAudioHwSyncForSession removing ID %d for session %d",
+ value, mHwAvSyncIds.keyAt(i));
+ mHwAvSyncIds.removeItemsAt(i);
+ break;
+ }
+ }
+
+ mHwAvSyncIds.add(sessionId, value);
+
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
sp<PlaybackThread> thread = mPlaybackThreads.valueAt(i);
- if ((thread->hasAudioSession(sessionId) & ThreadBase::TRACK_SESSION) != 0) {
- // A session can only be on one thread, so exit after first match
- String8 reply = thread->getParameters(String8(AUDIO_PARAMETER_STREAM_HW_AV_SYNC));
- AudioParameter param = AudioParameter(reply);
- int value;
- if (param.getInt(String8(AUDIO_PARAMETER_STREAM_HW_AV_SYNC), value) == NO_ERROR) {
- return value;
- }
+ uint32_t sessions = thread->hasAudioSession(sessionId);
+ if (sessions & PlaybackThread::TRACK_SESSION) {
+ AudioParameter param = AudioParameter();
+ param.addInt(String8(AUDIO_PARAMETER_STREAM_HW_AV_SYNC), value);
+ thread->setParameters(param.toString());
break;
}
}
- return AUDIO_HW_SYNC_INVALID;
+
+ ALOGV("getAudioHwSyncForSession adding ID %d for session %d", value, sessionId);
+ return (audio_hw_sync_t)value;
+}
+
+// setAudioHwSyncForSession_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::setAudioHwSyncForSession_l(PlaybackThread *thread, audio_session_t sessionId)
+{
+ ssize_t index = mHwAvSyncIds.indexOfKey(sessionId);
+ if (index >= 0) {
+ audio_hw_sync_t syncId = mHwAvSyncIds.valueAt(index);
+ ALOGV("setAudioHwSyncForSession_l found ID %d for session %d", syncId, sessionId);
+ AudioParameter param = AudioParameter();
+ param.addInt(String8(AUDIO_PARAMETER_STREAM_HW_AV_SYNC), syncId);
+ thread->setParameters(param.toString());
+ }
}
+
// ----------------------------------------------------------------------------
@@ -1928,13 +1976,13 @@ sp<AudioFlinger::RecordThread> AudioFlinger::openInput_l(audio_module_handle_t m
status_t status = inHwHal->open_input_stream(inHwHal, *input, device, &halconfig,
&inStream, flags, address.string(), source);
ALOGV("openInput_l() openInputStream returned input %p, SamplingRate %d"
- ", Format %#x, Channels %x, flags %#x, status %d",
+ ", Format %#x, Channels %x, flags %#x, status %d addr %s",
inStream,
halconfig.sample_rate,
halconfig.format,
halconfig.channel_mask,
flags,
- status);
+ status, address.string());
// If the input could not be opened with the requested parameters and we can handle the
// conversion internally, try to open again with the proposed parameters. The AudioFlinger can
@@ -2585,7 +2633,8 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId,
// Check whether the destination thread has a channel count of FCC_2, which is
// currently required for (most) effects. Prevent moving the effect chain here rather
// than disabling the addEffect_l() call in dstThread below.
- if (dstThread->mChannelCount != FCC_2) {
+ if ((dstThread->type() == ThreadBase::MIXER || dstThread->type() == ThreadBase::DUPLICATING) &&
+ dstThread->mChannelCount != FCC_2) {
ALOGW("moveEffectChain_l() effect chain failed because"
" destination thread %p channel count(%u) != %u",
dstThread, dstThread->mChannelCount, FCC_2);
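
The new getAudioHwSyncForSession() bookkeeping above enforces that a HW A/V sync ID obtained from the HAL is owned by at most one session: binding an ID to a new session evicts the previous owner from the map. A sketch of that invariant, with std::map standing in for the DefaultKeyedVector used in AudioFlinger:

    #include <cstdio>
    #include <map>

    using SessionId = int;
    using HwSyncId  = int;

    static std::map<SessionId, HwSyncId> gHwAvSyncIds;

    static void assignSyncId(SessionId session, HwSyncId id) {
        for (auto it = gHwAvSyncIds.begin(); it != gHwAvSyncIds.end(); ++it) {
            if (it->second == id) {        // ID already bound to another session
                gHwAvSyncIds.erase(it);    // release it so only one session keeps it
                break;
            }
        }
        gHwAvSyncIds[session] = id;
    }

    int main() {
        assignSyncId(/*session*/ 10, /*id*/ 1);
        assignSyncId(/*session*/ 11, /*id*/ 1);   // session 10 loses ID 1
        printf("sessions holding an ID: %zu\n", gHwAvSyncIds.size());  // prints 1
    }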
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 1003017..4fb372d 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -733,6 +733,8 @@ private:
// Effect chains without a valid thread
DefaultKeyedVector< audio_session_t , sp<EffectChain> > mOrphanEffectChains;
+ // list of sessions for which a valid HW A/V sync ID was retrieved from the HAL
+ DefaultKeyedVector< audio_session_t , audio_hw_sync_t >mHwAvSyncIds;
private:
sp<Client> registerPid(pid_t pid); // always returns non-0
@@ -741,6 +743,7 @@ private:
void closeOutputInternal_l(sp<PlaybackThread> thread);
status_t closeInput_nonvirtual(audio_io_handle_t input);
void closeInputInternal_l(sp<RecordThread> thread);
+ void setAudioHwSyncForSession_l(PlaybackThread *thread, audio_session_t sessionId);
#ifdef TEE_SINK
// all record threads serially share a common tee sink, which is re-created on format change
diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp
index 8246fef..fae19a1 100644
--- a/services/audioflinger/ServiceUtilities.cpp
+++ b/services/audioflinger/ServiceUtilities.cpp
@@ -50,6 +50,13 @@ bool captureHotwordAllowed() {
return ok;
}
+bool captureFmTunerAllowed() {
+ static const String16 sCaptureFmTunerAllowed("android.permission.ACCESS_FM_RADIO");
+ bool ok = checkCallingPermission(sCaptureFmTunerAllowed);
+ if (!ok) ALOGE("android.permission.ACCESS_FM_RADIO");
+ return ok;
+}
+
bool settingsAllowed() {
if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true;
static const String16 sAudioSettings("android.permission.MODIFY_AUDIO_SETTINGS");
diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h
index df6f6f4..ce18a90 100644
--- a/services/audioflinger/ServiceUtilities.h
+++ b/services/audioflinger/ServiceUtilities.h
@@ -23,6 +23,7 @@ extern pid_t getpid_cached;
bool recordingAllowed();
bool captureAudioOutputAllowed();
bool captureHotwordAllowed();
+bool captureFmTunerAllowed();
bool settingsAllowed();
bool modifyAudioRoutingAllowed();
bool dumpAllowed();
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index ed8293d..a75f784 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4660,7 +4660,11 @@ void AudioFlinger::DuplicatingThread::threadLoop_mix()
if (outputsReady(outputTracks)) {
mAudioMixer->process(AudioBufferProvider::kInvalidPTS);
} else {
- memset(mSinkBuffer, 0, mSinkBufferSize);
+ if (mMixerBufferValid) {
+ memset(mMixerBuffer, 0, mMixerBufferSize);
+ } else {
+ memset(mSinkBuffer, 0, mSinkBufferSize);
+ }
}
sleepTime = 0;
writeFrames = mNormalFrameCount;
diff --git a/services/audiopolicy/AudioPolicyEffects.cpp b/services/audiopolicy/AudioPolicyEffects.cpp
index 3c1c042..e7e1b36 100644
--- a/services/audiopolicy/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/AudioPolicyEffects.cpp
@@ -105,26 +105,28 @@ status_t AudioPolicyEffects::addInputEffects(audio_io_handle_t input,
inputDesc->mRefCount++;
ALOGV("addInputEffects(): input: %d, refCount: %d", input, inputDesc->mRefCount);
-
- Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
- for (size_t i = 0; i < effects.size(); i++) {
- EffectDesc *effect = effects[i];
- sp<AudioEffect> fx = new AudioEffect(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
- status_t status = fx->initCheck();
- if (status != NO_ERROR && status != ALREADY_EXISTS) {
- ALOGW("addInputEffects(): failed to create Fx %s on source %d",
+ if (inputDesc->mRefCount == 1) {
+ Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
+ for (size_t i = 0; i < effects.size(); i++) {
+ EffectDesc *effect = effects[i];
+ sp<AudioEffect> fx = new AudioEffect(NULL, &effect->mUuid, -1, 0, 0,
+ audioSession, input);
+ status_t status = fx->initCheck();
+ if (status != NO_ERROR && status != ALREADY_EXISTS) {
+ ALOGW("addInputEffects(): failed to create Fx %s on source %d",
+ effect->mName, (int32_t)aliasSource);
+ // fx goes out of scope and strong ref on AudioEffect is released
+ continue;
+ }
+ for (size_t j = 0; j < effect->mParams.size(); j++) {
+ fx->setParameter(effect->mParams[j]);
+ }
+ ALOGV("addInputEffects(): added Fx %s on source: %d",
effect->mName, (int32_t)aliasSource);
- // fx goes out of scope and strong ref on AudioEffect is released
- continue;
- }
- for (size_t j = 0; j < effect->mParams.size(); j++) {
- fx->setParameter(effect->mParams[j]);
+ inputDesc->mEffects.add(fx);
}
- ALOGV("addInputEffects(): added Fx %s on source: %d", effect->mName, (int32_t)aliasSource);
- inputDesc->mEffects.add(fx);
+ inputDesc->setProcessorEnabled(true);
}
- inputDesc->setProcessorEnabled(true);
-
return status;
}
@@ -241,26 +243,28 @@ status_t AudioPolicyEffects::addOutputSessionEffects(audio_io_handle_t output,
}
procDesc->mRefCount++;
- ALOGV("addOutputSessionEffects(): session: %d, refCount: %d", audioSession, procDesc->mRefCount);
-
- Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
- for (size_t i = 0; i < effects.size(); i++) {
- EffectDesc *effect = effects[i];
- sp<AudioEffect> fx = new AudioEffect(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
- status_t status = fx->initCheck();
- if (status != NO_ERROR && status != ALREADY_EXISTS) {
- ALOGE("addOutputSessionEffects(): failed to create Fx %s on session %d",
- effect->mName, audioSession);
- // fx goes out of scope and strong ref on AudioEffect is released
- continue;
+ ALOGV("addOutputSessionEffects(): session: %d, refCount: %d",
+ audioSession, procDesc->mRefCount);
+ if (procDesc->mRefCount == 1) {
+ Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
+ for (size_t i = 0; i < effects.size(); i++) {
+ EffectDesc *effect = effects[i];
+ sp<AudioEffect> fx = new AudioEffect(NULL, &effect->mUuid, 0, 0, 0,
+ audioSession, output);
+ status_t status = fx->initCheck();
+ if (status != NO_ERROR && status != ALREADY_EXISTS) {
+ ALOGE("addOutputSessionEffects(): failed to create Fx %s on session %d",
+ effect->mName, audioSession);
+ // fx goes out of scope and strong ref on AudioEffect is released
+ continue;
+ }
+ ALOGV("addOutputSessionEffects(): added Fx %s on session: %d for stream: %d",
+ effect->mName, audioSession, (int32_t)stream);
+ procDesc->mEffects.add(fx);
}
- ALOGV("addOutputSessionEffects(): added Fx %s on session: %d for stream: %d",
- effect->mName, audioSession, (int32_t)stream);
- procDesc->mEffects.add(fx);
- }
-
- procDesc->setProcessorEnabled(true);
+ procDesc->setProcessorEnabled(true);
+ }
return status;
}
@@ -281,7 +285,8 @@ status_t AudioPolicyEffects::releaseOutputSessionEffects(audio_io_handle_t outpu
EffectVector *procDesc = mOutputSessions.valueAt(index);
procDesc->mRefCount--;
- ALOGV("releaseOutputSessionEffects(): session: %d, refCount: %d", audioSession, procDesc->mRefCount);
+ ALOGV("releaseOutputSessionEffects(): session: %d, refCount: %d",
+ audioSession, procDesc->mRefCount);
if (procDesc->mRefCount == 0) {
procDesc->setProcessorEnabled(false);
procDesc->mEffects.clear();
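
Both changes above gate effect creation on the reference count reaching one, so repeated addInputEffects()/addOutputSessionEffects() calls for the same session no longer stack duplicate AudioEffect instances. A toy sketch of that gating (not the AudioPolicyEffects classes):

    #include <cstdio>

    struct SessionEffects {
        int  refCount = 0;
        bool effectsCreated = false;

        void addClient() {
            refCount++;
            if (refCount == 1) {            // first user of the session: create effects once
                effectsCreated = true;
                printf("creating effects\n");
            }
        }
        void releaseClient() {
            if (refCount > 0 && --refCount == 0) {   // last user gone: tear effects down
                effectsCreated = false;
                printf("releasing effects\n");
            }
        }
    };

    int main() {
        SessionEffects s;
        s.addClient();      // creates
        s.addClient();      // no-op: already created
        s.releaseClient();  // still one client
        s.releaseClient();  // releases
    }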
diff --git a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
index 6cd0ac8..c06ca72 100644
--- a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
@@ -129,8 +129,11 @@ audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream,
audio_output_flags_t flags,
const audio_offload_info_t *offloadInfo)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return AUDIO_IO_HANDLE_NONE;
+ }
if (mAudioPolicyManager == NULL) {
- return 0;
+ return AUDIO_IO_HANDLE_NONE;
}
ALOGV("getOutput()");
Mutex::Autolock _l(mLock);
@@ -158,6 +161,9 @@ status_t AudioPolicyService::startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return BAD_VALUE;
+ }
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
@@ -182,6 +188,9 @@ status_t AudioPolicyService::stopOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return BAD_VALUE;
+ }
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
@@ -238,11 +247,13 @@ audio_io_handle_t AudioPolicyService::getInput(audio_source_t inputSource,
return 0;
}
// already checked by client, but double-check in case the client wrapper is bypassed
- if (inputSource >= AUDIO_SOURCE_CNT && inputSource != AUDIO_SOURCE_HOTWORD) {
+ if (inputSource >= AUDIO_SOURCE_CNT && inputSource != AUDIO_SOURCE_HOTWORD &&
+ inputSource != AUDIO_SOURCE_FM_TUNER) {
return 0;
}
- if ((inputSource == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) {
+ if (((inputSource == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) ||
+ ((inputSource == AUDIO_SOURCE_FM_TUNER) && !captureFmTunerAllowed())) {
return 0;
}
audio_io_handle_t input;
@@ -366,6 +377,9 @@ status_t AudioPolicyService::getStreamVolumeIndex(audio_stream_type_t stream,
uint32_t AudioPolicyService::getStrategyForStream(audio_stream_type_t stream)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return 0;
+ }
if (mAudioPolicyManager == NULL) {
return 0;
}
@@ -376,8 +390,11 @@ uint32_t AudioPolicyService::getStrategyForStream(audio_stream_type_t stream)
audio_devices_t AudioPolicyService::getDevicesForStream(audio_stream_type_t stream)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return AUDIO_DEVICE_NONE;
+ }
if (mAudioPolicyManager == NULL) {
- return (audio_devices_t)0;
+ return AUDIO_DEVICE_NONE;
}
return mAudioPolicyManager->getDevicesForStream(stream);
}
@@ -422,8 +439,11 @@ status_t AudioPolicyService::setEffectEnabled(int id, bool enabled)
bool AudioPolicyService::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return false;
+ }
if (mAudioPolicyManager == NULL) {
- return 0;
+ return false;
}
Mutex::Autolock _l(mLock);
return mAudioPolicyManager->isStreamActive(stream, inPastMs);
@@ -431,8 +451,11 @@ bool AudioPolicyService::isStreamActive(audio_stream_type_t stream, uint32_t inP
bool AudioPolicyService::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return false;
+ }
if (mAudioPolicyManager == NULL) {
- return 0;
+ return false;
}
Mutex::Autolock _l(mLock);
return mAudioPolicyManager->isStreamActiveRemotely(stream, inPastMs);
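
Every Binder-facing entry point above now range-checks the stream type before using it, since the value arrives from an untrusted client and is later used as an array index. A sketch of the guard, with placeholder constants standing in for the audio_stream_type_t values; note the unsigned cast rejects negative values and too-large values in one comparison:

    #include <cstdint>

    enum StreamType : int32_t { STREAM_MUSIC = 3, STREAM_CNT = 10 };  // example values only

    static bool isStreamActive(int32_t stream /* as received over Binder */) {
        if (uint32_t(stream) >= uint32_t(STREAM_CNT)) {
            return false;                  // covers both negative and out-of-range values
        }
        // ... safe to use `stream` as a per-stream array index past this point ...
        return stream == STREAM_MUSIC;     // placeholder for the real per-stream lookup
    }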
diff --git a/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp b/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp
index e1e81e1..09476c1 100644
--- a/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp
+++ b/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp
@@ -134,8 +134,11 @@ audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream,
audio_output_flags_t flags,
const audio_offload_info_t *offloadInfo)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return AUDIO_IO_HANDLE_NONE;
+ }
if (mpAudioPolicy == NULL) {
- return 0;
+ return AUDIO_IO_HANDLE_NONE;
}
ALOGV("getOutput()");
Mutex::Autolock _l(mLock);
@@ -147,6 +150,9 @@ status_t AudioPolicyService::startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return BAD_VALUE;
+ }
if (mpAudioPolicy == NULL) {
return NO_INIT;
}
@@ -172,6 +178,9 @@ status_t AudioPolicyService::stopOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return BAD_VALUE;
+ }
if (mpAudioPolicy == NULL) {
return NO_INIT;
}
@@ -228,11 +237,13 @@ audio_io_handle_t AudioPolicyService::getInput(audio_source_t inputSource,
return 0;
}
// already checked by client, but double-check in case the client wrapper is bypassed
- if (inputSource >= AUDIO_SOURCE_CNT && inputSource != AUDIO_SOURCE_HOTWORD) {
+ if (inputSource >= AUDIO_SOURCE_CNT && inputSource != AUDIO_SOURCE_HOTWORD &&
+ inputSource != AUDIO_SOURCE_FM_TUNER) {
return 0;
}
- if ((inputSource == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) {
+ if (((inputSource == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) ||
+ ((inputSource == AUDIO_SOURCE_FM_TUNER) && !captureFmTunerAllowed())) {
return 0;
}
@@ -368,6 +379,9 @@ status_t AudioPolicyService::getStreamVolumeIndex(audio_stream_type_t stream,
uint32_t AudioPolicyService::getStrategyForStream(audio_stream_type_t stream)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return 0;
+ }
if (mpAudioPolicy == NULL) {
return 0;
}
@@ -378,8 +392,11 @@ uint32_t AudioPolicyService::getStrategyForStream(audio_stream_type_t stream)
audio_devices_t AudioPolicyService::getDevicesForStream(audio_stream_type_t stream)
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return AUDIO_DEVICE_NONE;
+ }
if (mpAudioPolicy == NULL) {
- return (audio_devices_t)0;
+ return AUDIO_DEVICE_NONE;
}
return mpAudioPolicy->get_devices_for_stream(mpAudioPolicy, stream);
}
@@ -424,8 +441,11 @@ status_t AudioPolicyService::setEffectEnabled(int id, bool enabled)
bool AudioPolicyService::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return false;
+ }
if (mpAudioPolicy == NULL) {
- return 0;
+ return false;
}
Mutex::Autolock _l(mLock);
return mpAudioPolicy->is_stream_active(mpAudioPolicy, stream, inPastMs);
@@ -433,8 +453,11 @@ bool AudioPolicyService::isStreamActive(audio_stream_type_t stream, uint32_t inP
bool AudioPolicyService::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
{
+ if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
+ return false;
+ }
if (mpAudioPolicy == NULL) {
- return 0;
+ return false;
}
Mutex::Autolock _l(mLock);
return mpAudioPolicy->is_stream_active_remotely(mpAudioPolicy, stream, inPastMs);
diff --git a/services/audiopolicy/AudioPolicyManager.cpp b/services/audiopolicy/AudioPolicyManager.cpp
index 536987a..584e170 100644
--- a/services/audiopolicy/AudioPolicyManager.cpp
+++ b/services/audiopolicy/AudioPolicyManager.cpp
@@ -26,7 +26,7 @@
// A device mask for all audio input devices that are considered "virtual" when evaluating
// active inputs in getActiveInput()
-#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL AUDIO_DEVICE_IN_REMOTE_SUBMIX
+#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|AUDIO_DEVICE_IN_FM_TUNER)
// A device mask for all audio output devices that are considered "remote" when evaluating
// active output devices in isStreamActiveRemotely()
#define APM_AUDIO_OUT_DEVICE_REMOTE_ALL AUDIO_DEVICE_OUT_REMOTE_SUBMIX
@@ -216,6 +216,10 @@ status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device,
const char *device_address)
{
String8 address = (device_address == NULL) ? String8("") : String8(device_address);
+ // handle legacy remote submix case where the address was not always specified
+ if (deviceDistinguishesOnAddress(device) && (address.length() == 0)) {
+ address = String8("0");
+ }
ALOGV("setDeviceConnectionState() device: %x, state %d, address %s",
device, state, address.string());
@@ -419,6 +423,10 @@ audio_policy_dev_state_t AudioPolicyManager::getDeviceConnectionState(audio_devi
audio_policy_dev_state_t state = AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
sp<DeviceDescriptor> devDesc = new DeviceDescriptor(String8(""), device);
devDesc->mAddress = (device_address == NULL) ? String8("") : String8(device_address);
+ // handle legacy remote submix case where the address was not always specified
+ if (deviceDistinguishesOnAddress(device) && (devDesc->mAddress.length() == 0)) {
+ devDesc->mAddress = String8("0");
+ }
ssize_t index;
DeviceVector *deviceVector;
@@ -707,7 +715,7 @@ void AudioPolicyManager::setForceUse(audio_policy_force_use_t usage,
config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
config != AUDIO_POLICY_FORCE_DIGITAL_DOCK && config != AUDIO_POLICY_FORCE_NONE &&
- config != AUDIO_POLICY_FORCE_NO_BT_A2DP) {
+ config != AUDIO_POLICY_FORCE_NO_BT_A2DP && config != AUDIO_POLICY_FORCE_SPEAKER ) {
ALOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
return;
}
@@ -854,7 +862,7 @@ audio_io_handle_t AudioPolicyManager::getOutputForAttr(const audio_attributes_t
flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_HW_AV_SYNC);
}
- ALOGV("getOutputForAttr() device %d, samplingRate %d, format %x, channelMask %x, flags %x",
+ ALOGV("getOutputForAttr() device 0x%x, samplingRate %d, format %x, channelMask %x, flags %x",
device, samplingRate, format, channelMask, flags);
audio_stream_type_t stream = streamTypefromAttributesInt(attr);
@@ -1119,6 +1127,20 @@ status_t AudioPolicyManager::startOutput(audio_io_handle_t output,
return BAD_VALUE;
}
+ // cannot start playback of STREAM_TTS if any other output is being used
+ uint32_t beaconMuteLatency = 0;
+ if (stream == AUDIO_STREAM_TTS) {
+ ALOGV("\t found BEACON stream");
+ if (isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) {
+ return INVALID_OPERATION;
+ } else {
+ beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
+ }
+ } else {
+ // some playback other than beacon starts
+ beaconMuteLatency = handleEventForBeacon(STARTING_OUTPUT);
+ }
+
sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
// increment usage count for this stream on the requested output:
@@ -1130,8 +1152,9 @@ status_t AudioPolicyManager::startOutput(audio_io_handle_t output,
audio_devices_t newDevice = getNewOutputDevice(output, false /*fromCache*/);
routing_strategy strategy = getStrategy(stream);
bool shouldWait = (strategy == STRATEGY_SONIFICATION) ||
- (strategy == STRATEGY_SONIFICATION_RESPECTFUL);
- uint32_t waitMs = 0;
+ (strategy == STRATEGY_SONIFICATION_RESPECTFUL) ||
+ (beaconMuteLatency > 0);
+ uint32_t waitMs = beaconMuteLatency;
bool force = false;
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<AudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -1145,7 +1168,8 @@ status_t AudioPolicyManager::startOutput(audio_io_handle_t output,
force = true;
}
// wait for audio on other active outputs to be presented when starting
- // a notification so that audio focus effect can propagate.
+ // a notification so that audio focus effect can propagate, or that a mute/unmute
+ // event occurred for beacon
uint32_t latency = desc->latency();
if (shouldWait && desc->isActive(latency * 2) && (waitMs < latency)) {
waitMs = latency;
@@ -1189,6 +1213,9 @@ status_t AudioPolicyManager::stopOutput(audio_io_handle_t output,
sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
+ // always handle stream stop, check which stream type is stopping
+ handleEventForBeacon(stream == AUDIO_STREAM_TTS ? STOPPING_BEACON : STOPPING_OUTPUT);
+
// handle special case for sonification while in call
if (isInCall()) {
handleIncallSonification(stream, false, false);
@@ -1356,11 +1383,14 @@ audio_io_handle_t AudioPolicyManager::getInput(audio_source_t inputSource,
config.channel_mask = channelMask;
config.format = format;
+ // handle legacy remote submix case where the address was not always specified
+ String8 address = deviceDistinguishesOnAddress(device) ? String8("0") : String8("");
+
status_t status = mpClientInterface->openInput(profile->mModule->mHandle,
&input,
&config,
&device,
- String8(""),
+ address,
halInputSource,
flags);
@@ -1584,12 +1614,17 @@ status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream,
}
mStreams[stream].mIndexCur.add(device, index);
- // compute and apply stream volume on all outputs according to connected device
+ // update volume on all outputs whose current device is also selected by the same
+ // strategy as the device specified by the caller
+ audio_devices_t strategyDevice = getDeviceForStrategy(getStrategy(stream), true /*fromCache*/);
+ if ((device != AUDIO_DEVICE_OUT_DEFAULT) && (device & strategyDevice) == 0) {
+ return NO_ERROR;
+ }
status_t status = NO_ERROR;
for (size_t i = 0; i < mOutputs.size(); i++) {
audio_devices_t curDevice =
getDeviceForVolume(mOutputs.valueAt(i)->device());
- if ((device == AUDIO_DEVICE_OUT_DEFAULT) || (device == curDevice)) {
+ if ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & strategyDevice) != 0)) {
status_t volStatus = checkAndSetVolume(stream, index, mOutputs.keyAt(i), curDevice);
if (volStatus != NO_ERROR) {
status = volStatus;
@@ -2653,7 +2688,10 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa
mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0),
mA2dpSuspended(false),
mSpeakerDrcEnabled(false), mNextUniqueId(1),
- mAudioPortGeneration(1)
+ mAudioPortGeneration(1),
+ mBeaconMuteRefCount(0),
+ mBeaconPlayingRefCount(0),
+ mBeaconMuted(false)
{
mUidCached = getuid();
mpClientInterface = clientInterface;
@@ -2787,6 +2825,14 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa
inputDesc->mInputSource = AUDIO_SOURCE_MIC;
inputDesc->mDevice = profileType;
+ // find the address
+ DeviceVector inputDevices = mAvailableInputDevices.getDevicesFromType(profileType);
+ // the inputs vector must be of size 1, but we don't want to crash here
+ String8 address = inputDevices.size() > 0 ? inputDevices.itemAt(0)->mAddress
+ : String8("");
+ ALOGV(" for input device 0x%x using address %s", profileType, address.string());
+ ALOGE_IF(inputDevices.size() == 0, "Input device list is empty!");
+
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.sample_rate = inputDesc->mSamplingRate;
config.channel_mask = inputDesc->mChannelMask;
@@ -2796,7 +2842,7 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa
&input,
&config,
&inputDesc->mDevice,
- String8(""),
+ address,
AUDIO_SOURCE_MIC,
AUDIO_INPUT_FLAG_NONE);
@@ -3816,6 +3862,8 @@ audio_devices_t AudioPolicyManager::getNewOutputDevice(audio_io_handle_t output,
// use device for strategy media
// 7: the strategy DTMF is active on the output:
// use device for strategy DTMF
+ // 8: the strategy for beacon, a.k.a. "transmitted through speaker" is active on the output:
+ // use device for strategy t-t-s
if (outputDesc->isStrategyActive(STRATEGY_ENFORCED_AUDIBLE) &&
mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
device = getDeviceForStrategy(STRATEGY_ENFORCED_AUDIBLE, fromCache);
@@ -3832,6 +3880,8 @@ audio_devices_t AudioPolicyManager::getNewOutputDevice(audio_io_handle_t output,
device = getDeviceForStrategy(STRATEGY_MEDIA, fromCache);
} else if (outputDesc->isStrategyActive(STRATEGY_DTMF)) {
device = getDeviceForStrategy(STRATEGY_DTMF, fromCache);
+ } else if (outputDesc->isStrategyActive(STRATEGY_TRANSMITTED_THROUGH_SPEAKER)) {
+ device = getDeviceForStrategy(STRATEGY_TRANSMITTED_THROUGH_SPEAKER, fromCache);
}
ALOGV("getNewOutputDevice() selected device %x", device);
@@ -3910,16 +3960,20 @@ AudioPolicyManager::routing_strategy AudioPolicyManager::getStrategy(
case AUDIO_STREAM_SYSTEM:
// NOTE: SYSTEM stream uses MEDIA strategy because muting music and switching outputs
// while key clicks are played produces a poor result
- case AUDIO_STREAM_TTS:
case AUDIO_STREAM_MUSIC:
return STRATEGY_MEDIA;
case AUDIO_STREAM_ENFORCED_AUDIBLE:
return STRATEGY_ENFORCED_AUDIBLE;
+ case AUDIO_STREAM_TTS:
+ return STRATEGY_TRANSMITTED_THROUGH_SPEAKER;
}
}
uint32_t AudioPolicyManager::getStrategyForAttr(const audio_attributes_t *attr) {
// flags to strategy mapping
+ if ((attr->flags & AUDIO_FLAG_BEACON) == AUDIO_FLAG_BEACON) {
+ return (uint32_t) STRATEGY_TRANSMITTED_THROUGH_SPEAKER;
+ }
if ((attr->flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) == AUDIO_FLAG_AUDIBILITY_ENFORCED) {
return (uint32_t) STRATEGY_ENFORCED_AUDIBLE;
}
@@ -3967,6 +4021,74 @@ void AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t
}
}
+bool AudioPolicyManager::isAnyOutputActive(audio_stream_type_t streamToIgnore) {
+ for (size_t s = 0 ; s < AUDIO_STREAM_CNT ; s++) {
+ if (s == (size_t) streamToIgnore) {
+ continue;
+ }
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ const sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
+ if (outputDesc->mRefCount[s] != 0) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+uint32_t AudioPolicyManager::handleEventForBeacon(int event) {
+ switch(event) {
+ case STARTING_OUTPUT:
+ mBeaconMuteRefCount++;
+ break;
+ case STOPPING_OUTPUT:
+ if (mBeaconMuteRefCount > 0) {
+ mBeaconMuteRefCount--;
+ }
+ break;
+ case STARTING_BEACON:
+ mBeaconPlayingRefCount++;
+ break;
+ case STOPPING_BEACON:
+ if (mBeaconPlayingRefCount > 0) {
+ mBeaconPlayingRefCount--;
+ }
+ break;
+ }
+
+ if (mBeaconMuteRefCount > 0) {
+ // any playback causes beacon to be muted
+ return setBeaconMute(true);
+ } else {
+ // no other playback: unmute when beacon starts playing, mute when it stops
+ return setBeaconMute(mBeaconPlayingRefCount == 0);
+ }
+}
+
+uint32_t AudioPolicyManager::setBeaconMute(bool mute) {
+ ALOGV("setBeaconMute(%d) mBeaconMuteRefCount=%d mBeaconPlayingRefCount=%d",
+ mute, mBeaconMuteRefCount, mBeaconPlayingRefCount);
+ // keep track of muted state to avoid repeating mute/unmute operations
+ if (mBeaconMuted != mute) {
+ // mute/unmute AUDIO_STREAM_TTS on all outputs
+ ALOGV("\t muting %d", mute);
+ uint32_t maxLatency = 0;
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<AudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ setStreamMute(AUDIO_STREAM_TTS, mute/*on*/,
+ desc->mIoHandle,
+ 0 /*delay*/, AUDIO_DEVICE_NONE);
+ const uint32_t latency = desc->latency() * 2;
+ if (latency > maxLatency) {
+ maxLatency = latency;
+ }
+ }
+ mBeaconMuted = mute;
+ return maxLatency;
+ }
+ return 0;
+}
+
audio_devices_t AudioPolicyManager::getDeviceForStrategy(routing_strategy strategy,
bool fromCache)
{
@@ -3980,6 +4102,14 @@ audio_devices_t AudioPolicyManager::getDeviceForStrategy(routing_strategy strate
audio_devices_t availableOutputDeviceTypes = mAvailableOutputDevices.types();
switch (strategy) {
+ case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
+ device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER;
+ if (!device) {
+ ALOGE("getDeviceForStrategy() no device found for "\
+ "STRATEGY_TRANSMITTED_THROUGH_SPEAKER");
+ }
+ break;
+
case STRATEGY_SONIFICATION_RESPECTFUL:
if (isInCall()) {
device = getDeviceForStrategy(STRATEGY_SONIFICATION, false /*fromCache*/);
@@ -4158,6 +4288,10 @@ audio_devices_t AudioPolicyManager::getDeviceForStrategy(routing_strategy strate
device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
}
}
+ if ((device2 == AUDIO_DEVICE_NONE) &&
+ (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] == AUDIO_POLICY_FORCE_SPEAKER)) {
+ device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER;
+ }
if (device2 == AUDIO_DEVICE_NONE) {
device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
}
@@ -4248,6 +4382,7 @@ uint32_t AudioPolicyManager::checkDeviceMuteStrategies(sp<AudioOutputDescriptor>
for (size_t i = 0; i < NUM_STRATEGIES; i++) {
audio_devices_t curDevice = getDeviceForStrategy((routing_strategy)i, false /*fromCache*/);
+ curDevice = curDevice & outputDesc->mProfile->mSupportedDevices.types();
bool mute = shouldMute && (curDevice & device) && (curDevice != device);
bool doMute = false;
@@ -4348,11 +4483,15 @@ uint32_t AudioPolicyManager::setOutputDevice(audio_io_handle_t output,
muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevice, delayMs);
// Do not change the routing if:
- // - the requested device is AUDIO_DEVICE_NONE
- // - the requested device is the same as current device and force is not specified.
+ // the requested device is AUDIO_DEVICE_NONE
+ // OR the requested device is the same as current device
+ // AND force is not specified
+ // AND the output is connected by a valid audio patch.
// Doing this check here allows the caller to call setOutputDevice() without conditions
- if ((device == AUDIO_DEVICE_NONE || device == prevDevice) && !force) {
- ALOGV("setOutputDevice() setting same device %04x or null device for output %d", device, output);
+ if ((device == AUDIO_DEVICE_NONE || device == prevDevice) && !force &&
+ outputDesc->mPatchHandle != 0) {
+ ALOGV("setOutputDevice() setting same device %04x or null device for output %d",
+ device, output);
return muteWaitMs;
}
@@ -4667,6 +4806,11 @@ audio_devices_t AudioPolicyManager::getDeviceForInputSource(audio_source_t input
device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
}
break;
+ case AUDIO_SOURCE_FM_TUNER:
+ if (availableDeviceTypes & AUDIO_DEVICE_IN_FM_TUNER) {
+ device = AUDIO_DEVICE_IN_FM_TUNER;
+ }
+ break;
default:
ALOGW("getDeviceForInputSource() invalid input source %d", inputSource);
break;
@@ -4891,6 +5035,16 @@ const AudioPolicyManager::VolumeCurvePoint
};
const AudioPolicyManager::VolumeCurvePoint
+ AudioPolicyManager::sLinearVolumeCurve[AudioPolicyManager::VOLCNT] = {
+ {0, -96.0f}, {33, -68.0f}, {66, -34.0f}, {100, 0.0f}
+};
+
+const AudioPolicyManager::VolumeCurvePoint
+ AudioPolicyManager::sSilentVolumeCurve[AudioPolicyManager::VOLCNT] = {
+ {0, -96.0f}, {1, -96.0f}, {2, -96.0f}, {100, -96.0f}
+};
+
+const AudioPolicyManager::VolumeCurvePoint
*AudioPolicyManager::sVolumeProfiles[AUDIO_STREAM_CNT]
[AudioPolicyManager::DEVICE_CATEGORY_CNT] = {
{ // AUDIO_STREAM_VOICE_CALL
@@ -4948,10 +5102,11 @@ const AudioPolicyManager::VolumeCurvePoint
sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
},
{ // AUDIO_STREAM_TTS
- sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET
- sSpeakerMediaVolumeCurve, // DEVICE_CATEGORY_SPEAKER
- sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_EARPIECE
- sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
+ // "Transmitted Through Speaker": always silent except on DEVICE_CATEGORY_SPEAKER
+ sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET
+ sLinearVolumeCurve, // DEVICE_CATEGORY_SPEAKER
+ sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE
+ sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
},
};
@@ -5843,12 +5998,28 @@ void AudioPolicyManager::AudioPort::importAudioPort(const sp<AudioPort> port) {
}
}
}
+ for (size_t k = 0 ; k < port->mGains.size() ; k++) {
+ sp<AudioGain> gain = port->mGains.itemAt(k);
+ if (gain != 0) {
+ bool hasGain = false;
+ for (size_t l = 0 ; l < mGains.size() ; l++) {
+ if (gain == mGains.itemAt(l)) {
+ hasGain = true;
+ break;
+ }
+ }
+ if (!hasGain) { // never import a gain twice
+ mGains.add(gain);
+ }
+ }
+ }
}
void AudioPolicyManager::AudioPort::clearCapabilities() {
mChannelMasks.clear();
mFormats.clear();
mSamplingRates.clear();
+ mGains.clear();
}
void AudioPolicyManager::AudioPort::loadSamplingRates(char *name)
@@ -6791,7 +6962,11 @@ void AudioPolicyManager::DeviceVector::loadDevicesFromName(char *name,
ARRAY_SIZE(sDeviceNameToEnumTable),
devName);
if (type != AUDIO_DEVICE_NONE) {
- add(new DeviceDescriptor(String8(""), type));
+ sp<DeviceDescriptor> dev = new DeviceDescriptor(String8(""), type);
+ if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
+ dev->mAddress = String8("0");
+ }
+ add(dev);
} else {
sp<DeviceDescriptor> deviceDesc =
declaredDevices.getDeviceFromName(String8(devName));
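
Among the AudioPolicyManager.cpp changes above is the beacon ("transmitted through speaker") arbitration: any ordinary playback mutes AUDIO_STREAM_TTS, and a beacon is audible only while it is the sole active stream. A toy state machine mirroring mBeaconMuteRefCount / mBeaconPlayingRefCount, as a sketch of the intended behaviour rather than the actual implementation:

    #include <cstdio>

    struct BeaconPolicy {
        int  muteRefs = 0;      // active non-beacon outputs
        int  playingRefs = 0;   // active beacon streams
        bool muted = false;

        enum Event { START_OUTPUT, STOP_OUTPUT, START_BEACON, STOP_BEACON };

        void handle(Event e) {
            switch (e) {
            case START_OUTPUT: muteRefs++; break;
            case STOP_OUTPUT:  if (muteRefs > 0) muteRefs--; break;
            case START_BEACON: playingRefs++; break;
            case STOP_BEACON:  if (playingRefs > 0) playingRefs--; break;
            }
            // Mute whenever anything else plays; otherwise audible only while a beacon plays.
            bool wantMute = (muteRefs > 0) ? true : (playingRefs == 0);
            if (wantMute != muted) {        // avoid repeating mute/unmute operations
                muted = wantMute;
                printf("beacon %s\n", muted ? "muted" : "audible");
            }
        }
    };

    int main() {
        BeaconPolicy p;
        p.handle(BeaconPolicy::START_BEACON);  // only the beacon is active: stays audible
        p.handle(BeaconPolicy::START_OUTPUT);  // music starts -> beacon muted
        p.handle(BeaconPolicy::STOP_OUTPUT);   // music stops  -> beacon audible again
    }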
diff --git a/services/audiopolicy/AudioPolicyManager.h b/services/audiopolicy/AudioPolicyManager.h
index 0ea7b97..50d7831 100644
--- a/services/audiopolicy/AudioPolicyManager.h
+++ b/services/audiopolicy/AudioPolicyManager.h
@@ -187,6 +187,7 @@ protected:
STRATEGY_SONIFICATION_RESPECTFUL,
STRATEGY_DTMF,
STRATEGY_ENFORCED_AUDIBLE,
+ STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
NUM_STRATEGIES
};
@@ -434,6 +435,8 @@ protected:
static const VolumeCurvePoint sHeadsetSystemVolumeCurve[AudioPolicyManager::VOLCNT];
static const VolumeCurvePoint sDefaultVoiceVolumeCurve[AudioPolicyManager::VOLCNT];
static const VolumeCurvePoint sSpeakerVoiceVolumeCurve[AudioPolicyManager::VOLCNT];
+ static const VolumeCurvePoint sLinearVolumeCurve[AudioPolicyManager::VOLCNT];
+ static const VolumeCurvePoint sSilentVolumeCurve[AudioPolicyManager::VOLCNT];
// default volume curves per stream and device category. See initializeVolumeCurves()
static const VolumeCurvePoint *sVolumeProfiles[AUDIO_STREAM_CNT][DEVICE_CATEGORY_CNT];
@@ -600,8 +603,10 @@ protected:
audio_io_handle_t output, audio_devices_t device);
// check that volume change is permitted, compute and send new volume to audio hardware
- status_t checkAndSetVolume(audio_stream_type_t stream, int index, audio_io_handle_t output,
- audio_devices_t device, int delayMs = 0, bool force = false);
+ virtual status_t checkAndSetVolume(audio_stream_type_t stream, int index,
+ audio_io_handle_t output,
+ audio_devices_t device,
+ int delayMs = 0, bool force = false);
// apply all stream volumes to the specified output and device
void applyStreamVolumes(audio_io_handle_t output, audio_devices_t device, int delayMs = 0, bool force = false);
@@ -806,6 +811,18 @@ protected:
sp<AudioPatch> mCallTxPatch;
sp<AudioPatch> mCallRxPatch;
+ // for supporting "beacon" streams, i.e. streams that only play on speaker, and never
+ // when something other than STREAM_TTS (a.k.a. "Transmitted Through Speaker") is playing
+ enum {
+ STARTING_OUTPUT,
+ STARTING_BEACON,
+ STOPPING_OUTPUT,
+ STOPPING_BEACON
+ };
+ uint32_t mBeaconMuteRefCount; // ref count for stream that would mute beacon
+ uint32_t mBeaconPlayingRefCount;// ref count for the playing beacon streams
+ bool mBeaconMuted; // has STREAM_TTS been muted
+
#ifdef AUDIO_POLICY_TEST
Mutex mLock;
Condition mWaitWorkCV;
@@ -820,10 +837,9 @@ protected:
uint32_t mTestChannels;
uint32_t mTestLatencyMs;
#endif //AUDIO_POLICY_TEST
-
-private:
static float volIndexToAmpl(audio_devices_t device, const StreamDescriptor& streamDesc,
int indexInUi);
+private:
// updates device caching and output for streams that can influence the
// routing of notifications
void handleNotificationRoutingForStream(audio_stream_type_t stream);
@@ -851,6 +867,13 @@ private:
const audio_offload_info_t *offloadInfo);
// internal function to derive a stream type value from audio attributes
audio_stream_type_t streamTypefromAttributesInt(const audio_attributes_t *attr);
+ // return true if any output is playing anything besides the stream to ignore
+ bool isAnyOutputActive(audio_stream_type_t streamToIgnore);
+ // event is one of STARTING_OUTPUT, STARTING_BEACON, STOPPING_OUTPUT, STOPPING_BEACON
+ // returns 0 if no mute/unmute event happened, the largest latency of the device where
+ // the mute/unmute happened
+ uint32_t handleEventForBeacon(int event);
+ uint32_t setBeaconMute(bool mute);
};
};
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index ec1a341..3bdeab2 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -420,12 +420,20 @@ void Camera2Client::disconnect() {
ALOGV("Camera %d: Waiting for threads", mCameraId);
- mStreamingProcessor->join();
- mFrameProcessor->join();
- mCaptureSequencer->join();
- mJpegProcessor->join();
- mZslProcessorThread->join();
- mCallbackProcessor->join();
+ {
+ // Don't wait with lock held, in case the other threads need to
+ // complete callbacks that re-enter Camera2Client
+ mBinderSerializationLock.unlock();
+
+ mStreamingProcessor->join();
+ mFrameProcessor->join();
+ mCaptureSequencer->join();
+ mJpegProcessor->join();
+ mZslProcessorThread->join();
+ mCallbackProcessor->join();
+
+ mBinderSerializationLock.lock();
+ }
ALOGV("Camera %d: Deleting streams", mCameraId);
diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
index 0bfdfd4..5502dcb 100644
--- a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
+++ b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
@@ -44,7 +44,7 @@ status_t BurstCapture::start(Vector<CameraMetadata> &/*metadatas*/,
return INVALID_OPERATION;
}
-void BurstCapture::onFrameAvailable() {
+void BurstCapture::onFrameAvailable(const BufferItem &/*item*/) {
ALOGV("%s", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
if(!mInputChanged) {
diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.h b/services/camera/libcameraservice/api1/client2/BurstCapture.h
index ea321fd..c3b7722 100644
--- a/services/camera/libcameraservice/api1/client2/BurstCapture.h
+++ b/services/camera/libcameraservice/api1/client2/BurstCapture.h
@@ -39,7 +39,7 @@ public:
BurstCapture(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
virtual ~BurstCapture();
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
virtual status_t start(Vector<CameraMetadata> &metadatas, int32_t firstCaptureId);
protected:
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index bf3318e..eadaa00 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -46,7 +46,7 @@ CallbackProcessor::~CallbackProcessor() {
deleteStream();
}
-void CallbackProcessor::onFrameAvailable() {
+void CallbackProcessor::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock l(mInputMutex);
if (!mCallbackAvailable) {
mCallbackAvailable = true;
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index 613f5be..7fdc329 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -44,7 +44,7 @@ class CallbackProcessor:
CallbackProcessor(sp<Camera2Client> client);
~CallbackProcessor();
- void onFrameAvailable();
+ void onFrameAvailable(const BufferItem& item);
// Set to NULL to disable the direct-to-app callback window
status_t setCallbackWindow(sp<ANativeWindow> callbackWindow);
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 312a78c..40d53b3 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -168,6 +168,19 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
faceIds = entry.data.i32;
}
+ entry = frame.find(ANDROID_SCALER_CROP_REGION);
+ if (entry.count < 4) {
+ ALOGE("%s: Camera %d: Unable to read crop region (count = %d)",
+ __FUNCTION__, client->getCameraId(), entry.count);
+ return res;
+ }
+
+ Parameters::CropRegion scalerCrop = {
+ static_cast<float>(entry.data.i32[0]),
+ static_cast<float>(entry.data.i32[1]),
+ static_cast<float>(entry.data.i32[2]),
+ static_cast<float>(entry.data.i32[3])};
+
faces.setCapacity(metadata.number_of_faces);
size_t maxFaces = metadata.number_of_faces;
@@ -183,26 +196,30 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
camera_face_t face;
- face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
- face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
- face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
- face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
+ face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceRects[i*4 + 0], scalerCrop);
+ face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceRects[i*4 + 1], scalerCrop);
+ face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceRects[i*4 + 2], scalerCrop);
+ face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceRects[i*4 + 3], scalerCrop);
face.score = faceScores[i];
if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
face.id = faceIds[i];
- face.left_eye[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
- face.left_eye[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
- face.right_eye[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
- face.right_eye[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
- face.mouth[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
- face.mouth[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
+ face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceLandmarks[i*6 + 0], scalerCrop);
+ face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceLandmarks[i*6 + 1], scalerCrop);
+ face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceLandmarks[i*6 + 2], scalerCrop);
+ face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceLandmarks[i*6 + 3], scalerCrop);
+ face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceLandmarks[i*6 + 4], scalerCrop);
+ face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceLandmarks[i*6 + 5], scalerCrop);
} else {
face.id = 0;
face.left_eye[0] = face.left_eye[1] = -2000;
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index b433781..2772267 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -51,7 +51,7 @@ JpegProcessor::~JpegProcessor() {
deleteStream();
}
-void JpegProcessor::onFrameAvailable() {
+void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock l(mInputMutex);
if (!mCaptureAvailable) {
mCaptureAvailable = true;
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index b2c05df..2040b30 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -47,7 +47,7 @@ class JpegProcessor:
~JpegProcessor();
// CpuConsumer listener implementation
- void onFrameAvailable();
+ void onFrameAvailable(const BufferItem& item);
status_t updateStream(const Parameters &params);
status_t deleteStream();
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 7b90d28..74bbb9d 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -2619,75 +2619,71 @@ int Parameters::normalizedYToCrop(int y) const {
return (y + 1000) * (previewCrop.height - 1) / 2000;
}
-int Parameters::arrayXToCrop(int x) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return x - previewCrop.left;
-}
-
-int Parameters::arrayYToCrop(int y) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return y - previewCrop.top;
-}
+int Parameters::normalizedXToArray(int x) const {
-int Parameters::cropXToNormalized(int x) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return x * 2000 / (previewCrop.width - 1) - 1000;
-}
+ // Work-around for HAL pre-scaling the coordinates themselves
+ if (quirks.meteringCropRegion) {
+ return (x + 1000) * (fastInfo.arrayWidth - 1) / 2000;
+ }
-int Parameters::cropYToNormalized(int y) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return y * 2000 / (previewCrop.height - 1) - 1000;
+ return cropXToArray(normalizedXToCrop(x));
}
-int Parameters::arrayXToNormalized(int width) const {
- int ret = cropXToNormalized(arrayXToCrop(width));
-
- ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of "
- "lower bounds %d", ret);
- ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of "
- "upper bounds %d", ret);
-
+int Parameters::normalizedYToArray(int y) const {
// Work-around for HAL pre-scaling the coordinates themselves
if (quirks.meteringCropRegion) {
- return width * 2000 / (fastInfo.arrayWidth - 1) - 1000;
+ return (y + 1000) * (fastInfo.arrayHeight - 1) / 2000;
}
- return ret;
+ return cropYToArray(normalizedYToCrop(y));
}
-int Parameters::arrayYToNormalized(int height) const {
- int ret = cropYToNormalized(arrayYToCrop(height));
- ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of lower bounds"
- " %d", ret);
- ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of upper bounds"
- " %d", ret);
+Parameters::CropRegion Parameters::calculatePreviewCrop(
+ const CropRegion &scalerCrop) const {
+ float left, top, width, height;
+ float previewAspect = static_cast<float>(previewWidth) / previewHeight;
+ float cropAspect = scalerCrop.width / scalerCrop.height;
- // Work-around for HAL pre-scaling the coordinates themselves
- if (quirks.meteringCropRegion) {
- return height * 2000 / (fastInfo.arrayHeight - 1) - 1000;
+ if (previewAspect > cropAspect) {
+ width = scalerCrop.width;
+ height = cropAspect * scalerCrop.height / previewAspect;
+
+ left = scalerCrop.left;
+ top = scalerCrop.top + (scalerCrop.height - height) / 2;
+ } else {
+ width = previewAspect * scalerCrop.width / cropAspect;
+ height = scalerCrop.height;
+
+ left = scalerCrop.left + (scalerCrop.width - width) / 2;
+ top = scalerCrop.top;
}
- return ret;
-}
+ CropRegion previewCrop = {left, top, width, height};
-int Parameters::normalizedXToArray(int x) const {
+ return previewCrop;
+}
+int Parameters::arrayXToNormalizedWithCrop(int x,
+ const CropRegion &scalerCrop) const {
// Work-around for HAL pre-scaling the coordinates themselves
if (quirks.meteringCropRegion) {
- return (x + 1000) * (fastInfo.arrayWidth - 1) / 2000;
+ return x * 2000 / (fastInfo.arrayWidth - 1) - 1000;
+ } else {
+ CropRegion previewCrop = calculatePreviewCrop(scalerCrop);
+ return (x - previewCrop.left) * 2000 / (previewCrop.width - 1) - 1000;
}
-
- return cropXToArray(normalizedXToCrop(x));
}
-int Parameters::normalizedYToArray(int y) const {
+int Parameters::arrayYToNormalizedWithCrop(int y,
+ const CropRegion &scalerCrop) const {
// Work-around for HAL pre-scaling the coordinates themselves
if (quirks.meteringCropRegion) {
- return (y + 1000) * (fastInfo.arrayHeight - 1) / 2000;
+ return y * 2000 / (fastInfo.arrayHeight - 1) - 1000;
+ } else {
+ CropRegion previewCrop = calculatePreviewCrop(scalerCrop);
+ return (y - previewCrop.top) * 2000 / (previewCrop.height - 1) - 1000;
}
-
- return cropYToArray(normalizedYToCrop(y));
}
status_t Parameters::getFilteredSizes(Size limit, Vector<Size> *sizes) {
@@ -2954,6 +2950,10 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
if (!sensorSize.count) return NO_INIT;
+ camera_metadata_ro_entry_t pixelArraySize =
+ staticInfo(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 2, 2);
+ if (!pixelArraySize.count) return NO_INIT;
+
float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
fastInfo.arrayHeight;
float stillAspect = static_cast<float>(pictureWidth) / pictureHeight;
@@ -3003,6 +3003,16 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
vertCropFactor = (arrayAspect < stillAspect) ?
(arrayAspect / stillAspect) : 1.f;
}
+
+ /**
+ * Convert the crop factors w.r.t. the active array size to the crop factors
+ * w.r.t. the pixel array size.
+ */
+ horizCropFactor *= (static_cast<float>(fastInfo.arrayWidth) /
+ pixelArraySize.data.i32[0]);
+ vertCropFactor *= (static_cast<float>(fastInfo.arrayHeight) /
+ pixelArraySize.data.i32[1]);
+
ALOGV("Horiz crop factor: %f, vert crop fact: %f",
horizCropFactor, vertCropFactor);
/**
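To make the new crop-aware normalization concrete, here is a small standalone example that reproduces the calculatePreviewCrop()/arrayXToNormalizedWithCrop() math with assumed values (a 4:3 active-array crop shown on a 16:9 preview); the numbers are illustrative, not taken from the patch:

#include <cstdio>

struct CropRegion { float left, top, width, height; };

int main() {
    // Assumed values: full 4000x3000 (4:3) scaler crop, 1920x1080 (16:9) preview.
    CropRegion scalerCrop = {0, 0, 4000, 3000};
    float previewAspect = 1920.0f / 1080.0f;                      // ~1.78
    float cropAspect = scalerCrop.width / scalerCrop.height;      // ~1.33

    // previewAspect > cropAspect: keep the full crop width, letterbox the height.
    float height = cropAspect * scalerCrop.height / previewAspect;    // 2250
    float top = scalerCrop.top + (scalerCrop.height - height) / 2;    // 375
    CropRegion previewCrop = {scalerCrop.left, top, scalerCrop.width, height};

    // A face at the array center (2000, 1500) maps to roughly (0, 0) in the
    // camera1 (-1000,-1000)-(1000,1000) coordinate system.
    int x = 2000, y = 1500;
    int nx = (x - previewCrop.left) * 2000 / (previewCrop.width - 1) - 1000;
    int ny = (y - previewCrop.top) * 2000 / (previewCrop.height - 1) - 1000;
    printf("previewCrop = {%.0f, %.0f, %.0f, %.0f}, normalized = (%d, %d)\n",
           previewCrop.left, previewCrop.top, previewCrop.width, previewCrop.height,
           nx, ny);
    return 0;
}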
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 815cc55..389cb92 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -325,13 +325,17 @@ struct Parameters {
// Note that this doesn't apply to the (deprecated) single FPS value.
static const int kFpsToApiScale = 1000;
- // Transform between (-1000,-1000)-(1000,1000) normalized coords from camera
- // API and HAL2 (0,0)-(activePixelArray.width/height) coordinates
- int arrayXToNormalized(int width) const;
- int arrayYToNormalized(int height) const;
+ // Transform from camera API (-1000,-1000)-(1000,1000) normalized coords to
+ // HAL2 (0,0)-(activePixelArray.width/height) coordinates
int normalizedXToArray(int x) const;
int normalizedYToArray(int y) const;
+ // Transform from HAL3 (0,0)-(activePixelArray.width/height) coordinates to
+ // (-1000,-1000)-(1000,1000) normalized coordinates given a scaler crop
+ // region.
+ int arrayXToNormalizedWithCrop(int x, const CropRegion &scalerCrop) const;
+ int arrayYToNormalizedWithCrop(int y, const CropRegion &scalerCrop) const;
+
struct Range {
int min;
int max;
@@ -341,20 +345,20 @@ struct Parameters {
private:
- // Convert between HAL2 sensor array coordinates and
- // viewfinder crop-region relative array coordinates
+ // Convert from viewfinder crop-region relative array coordinates
+ // to HAL2 sensor array coordinates
int cropXToArray(int x) const;
int cropYToArray(int y) const;
- int arrayXToCrop(int x) const;
- int arrayYToCrop(int y) const;
- // Convert between viewfinder crop-region relative array coordinates
- // and camera API (-1000,1000)-(1000,1000) normalized coords
- int cropXToNormalized(int x) const;
- int cropYToNormalized(int y) const;
+ // Convert from camera API (-1000,-1000)-(1000,1000) normalized coords
+ // to viewfinder crop-region relative array coordinates
int normalizedXToCrop(int x) const;
int normalizedYToCrop(int y) const;
+ // Given a scaler crop region, calculate the preview crop region based on
+ // the preview aspect ratio.
+ CropRegion calculatePreviewCrop(const CropRegion &scalerCrop) const;
+
Vector<Size> availablePreviewSizes;
Vector<Size> availableVideoSizes;
// Get size list (that are no larger than limit) from static metadata.
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 9e7fff8..470624b 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -635,7 +635,7 @@ status_t StreamingProcessor::incrementStreamingIds() {
return OK;
}
-void StreamingProcessor::onFrameAvailable() {
+void StreamingProcessor::onFrameAvailable(const BufferItem& /*item*/) {
ATRACE_CALL();
Mutex::Autolock l(mMutex);
if (!mRecordingFrameAvailable) {
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 8466af4..1d679a4 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -80,7 +80,7 @@ class StreamingProcessor:
status_t incrementStreamingIds();
// Callback for new recording frames from HAL
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// Callback from stagefright which returns used recording frames
void releaseRecordingFrame(const sp<IMemory>& mem);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8f78103..8b7e4b4 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -66,7 +66,7 @@ ZslProcessor::~ZslProcessor() {
disconnect();
}
-void ZslProcessor::onFrameAvailable() {
+void ZslProcessor::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock l(mInputMutex);
if (!mZslBufferAvailable) {
mZslBufferAvailable = true;
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index b6533cf..2099c38 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -53,7 +53,7 @@ class ZslProcessor:
~ZslProcessor();
// From mZslConsumer
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// From FrameProcessor
virtual void onResultAvailable(const CaptureResult &result);
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 8caadd6..d1158d6 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -793,11 +793,6 @@ status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf)
mStreamSlotCount = 0;
return OK;
}
- camera_metadata_t *buf2 = clone_camera_metadata(buf);
- if (!buf2) {
- ALOGE("%s: Unable to clone metadata buffer!", __FUNCTION__);
- return NO_MEMORY;
- }
if (mStreamSlotCount > 1) {
List<camera_metadata_t*>::iterator deleter = ++mStreamSlot.begin();
@@ -806,9 +801,9 @@ status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf)
}
if (mStreamSlotCount == 1) {
free_camera_metadata( *(mStreamSlot.begin()) );
- *(mStreamSlot.begin()) = buf2;
+ *(mStreamSlot.begin()) = buf;
} else {
- mStreamSlot.push_front(buf2);
+ mStreamSlot.push_front(buf);
mStreamSlotCount = 1;
}
return signalConsumerLocked();
@@ -827,12 +822,7 @@ status_t Camera2Device::MetadataQueue::setStreamSlot(
mStreamSlotCount = 0;
for (List<camera_metadata_t*>::const_iterator r = bufs.begin();
r != bufs.end(); r++) {
- camera_metadata_t *r2 = clone_camera_metadata(*r);
- if (!r2) {
- ALOGE("%s: Unable to clone metadata buffer!", __FUNCTION__);
- return NO_MEMORY;
- }
- mStreamSlot.push_back(r2);
+ mStreamSlot.push_back(*r);
mStreamSlotCount++;
}
return signalConsumerLocked();
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 2a3f1d9..4def8ae 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -124,8 +124,8 @@ class Camera2Device: public CameraDeviceBase {
// Set repeating buffer(s); if the queue is empty on a dequeue call, the
// queue copies the contents of the stream slot into the queue, and then
- // dequeues the first new entry. The metadata buffers passed in are
- // copied.
+ // dequeues the first new entry. These methods take ownership of the
+ // metadata buffers passed in.
status_t setStreamSlot(camera_metadata_t *buf);
status_t setStreamSlot(const List<camera_metadata_t*> &bufs);
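With setStreamSlot() now taking ownership instead of cloning, the caller is the one that clones (if it still needs its own copy) and must not free the buffer after handing it over. A hedged sketch of the calling convention (the caller and field names below are assumptions, not from the patch):

// Queue a repeating request: ownership of 'request' moves to the queue.
camera_metadata_t *request = clone_camera_metadata(sourceRequest);
if (request == NULL) {
    return NO_MEMORY;
}

status_t res = mRequestQueue.setStreamSlot(request);
if (res != OK) {
    // Whether the queue frees the buffer on failure is not spelled out here;
    // verify before adding a free_camera_metadata(request) call, to avoid a
    // potential double free.
    return res;
}
// Do not free_camera_metadata(request) after a successful call any more.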
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index f8562ec..d0f29de 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -268,7 +268,7 @@ status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
return OK;
}
-void RingBufferConsumer::onFrameAvailable() {
+void RingBufferConsumer::onFrameAvailable(const android::BufferItem& item) {
status_t err;
{
@@ -321,7 +321,7 @@ void RingBufferConsumer::onFrameAvailable() {
item.mGraphicBuffer = mSlots[item.mBuf].mGraphicBuffer;
} // end of mMutex lock
- ConsumerBase::onFrameAvailable();
+ ConsumerBase::onFrameAvailable(item);
}
void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index da97a11..90fd734 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -165,7 +165,7 @@ class RingBufferConsumer : public ConsumerBase,
private:
// Override ConsumerBase::onFrameAvailable
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const android::BufferItem& item);
void pinBufferLocked(const BufferItem& item);
void unpinBuffer(const BufferItem& item);
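All of the consumer listeners above move to the BufferItem-carrying callback signature. A minimal sketch of a listener written against the new interface (the class name and log line are assumptions; only onFrameAvailable(const BufferItem&) comes from the patch):

#define LOG_TAG "SketchFrameListener"
#include <inttypes.h>
#include <utils/Log.h>
#include <gui/CpuConsumer.h>

// No-op listener using the new signature. The BufferItem now arrives with the
// notification, so simple listeners can read per-frame data (e.g. the
// timestamp) without acquiring the buffer first.
class SketchFrameListener : public CpuConsumer::FrameAvailableListener {
public:
    virtual void onFrameAvailable(const BufferItem& item) {
        ALOGV("Frame available, timestamp %" PRId64, item.mTimestamp);
    }
};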